[ 506.355431] env[63538]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=63538) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 506.355787] env[63538]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=63538) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 506.355834] env[63538]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=63538) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 506.356183] env[63538]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 506.455553] env[63538]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=63538) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 506.466163] env[63538]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=63538) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 507.072674] env[63538]: INFO nova.virt.driver [None req-568496ad-a211-4188-8796-5a0372a6007b None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 507.146604] env[63538]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 507.146778] env[63538]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 507.146864] env[63538]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=63538) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 510.403110] env[63538]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-11672f16-fcc7-4b89-ae45-fb165f181ed0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.419653] env[63538]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=63538) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 510.419809] env[63538]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-8fcb030f-d6bf-4bcd-a028-a1743650689e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.453473] env[63538]: INFO oslo_vmware.api [-] Successfully established new session; session ID is c8ecb.
[ 510.453678] env[63538]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.307s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 510.454266] env[63538]: INFO nova.virt.vmwareapi.driver [None req-568496ad-a211-4188-8796-5a0372a6007b None None] VMware vCenter version: 7.0.3
[ 510.457716] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0578cc-7093-42ea-a55c-3b49c6350a4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.476320] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c674306a-3d51-403e-8c5b-9b0097d4ded4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.482460] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c5502f-2cd0-4b8e-87c0-eb0c6ff7590a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.489278] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5131b7f-5a4e-46cf-a146-d472bdfc6738 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.505163] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb8421b-04ea-473f-8960-acf29e7f7f1e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.511752] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2954b645-4cba-461b-af49-df3b8fa01450 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.542525] env[63538]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-e1cc3afd-0628-4a48-9161-95a140032e0c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 510.548162] env[63538]: DEBUG nova.virt.vmwareapi.driver [None req-568496ad-a211-4188-8796-5a0372a6007b None None] Extension org.openstack.compute already exists. {{(pid=63538) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:225}}
[ 510.550894] env[63538]: INFO nova.compute.provider_config [None req-568496ad-a211-4188-8796-5a0372a6007b None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 511.054183] env[63538]: DEBUG nova.context [None req-568496ad-a211-4188-8796-5a0372a6007b None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),73d243e6-b47d-4c6d-9d23-34ebf04e0680(cell1) {{(pid=63538) load_cells /opt/stack/nova/nova/context.py:464}}
[ 511.056463] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 511.056685] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 511.057412] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 511.057858] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Acquiring lock "73d243e6-b47d-4c6d-9d23-34ebf04e0680" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 511.058066] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Lock "73d243e6-b47d-4c6d-9d23-34ebf04e0680" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 511.059182] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Lock "73d243e6-b47d-4c6d-9d23-34ebf04e0680" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 511.080075] env[63538]: INFO dbcounter [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Registered counter for database nova_cell0
[ 511.088566] env[63538]: INFO dbcounter [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Registered counter for database nova_cell1
[ 511.091859] env[63538]: DEBUG oslo_db.sqlalchemy.engines [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63538) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 511.092532] env[63538]: DEBUG oslo_db.sqlalchemy.engines [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63538) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 511.097053] env[63538]: ERROR nova.db.main.api [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 511.097053] env[63538]: result = function(*args, **kwargs)
[ 511.097053] env[63538]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 511.097053] env[63538]: return func(*args, **kwargs)
[ 511.097053] env[63538]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 511.097053] env[63538]: result = fn(*args, **kwargs)
[ 511.097053] env[63538]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 511.097053] env[63538]: return f(*args, **kwargs)
[ 511.097053] env[63538]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 511.097053] env[63538]: return db.service_get_minimum_version(context, binaries)
[ 511.097053] env[63538]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 511.097053] env[63538]: _check_db_access()
[ 511.097053] env[63538]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 511.097053] env[63538]: stacktrace = ''.join(traceback.format_stack())
[ 511.097053] env[63538]:
[ 511.098202] env[63538]: ERROR nova.db.main.api [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 511.098202] env[63538]: result = function(*args, **kwargs)
[ 511.098202] env[63538]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 511.098202] env[63538]: return func(*args, **kwargs)
[ 511.098202] env[63538]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 511.098202] env[63538]: result = fn(*args, **kwargs)
[ 511.098202] env[63538]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 511.098202] env[63538]: return f(*args, **kwargs)
[ 511.098202] env[63538]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 511.098202] env[63538]: return db.service_get_minimum_version(context, binaries)
[ 511.098202] env[63538]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 511.098202] env[63538]: _check_db_access()
[ 511.098202] env[63538]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 511.098202] env[63538]: stacktrace = ''.join(traceback.format_stack())
[ 511.098202] env[63538]:
[ 511.098602] env[63538]: WARNING nova.objects.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 511.098773] env[63538]: WARNING nova.objects.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Failed to get minimum service version for cell 73d243e6-b47d-4c6d-9d23-34ebf04e0680
[ 511.099255] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Acquiring lock "singleton_lock" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 511.099424] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Acquired lock "singleton_lock" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
511.099679] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Releasing lock "singleton_lock" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 511.100021] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Full set of CONF: {{(pid=63538) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 511.100176] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ******************************************************************************** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 511.100308] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Configuration options gathered from: {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 511.100519] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}} [ 511.100781] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 511.100909] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ================================================================================ {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}} [ 511.101145] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] allow_resize_to_same_host = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.101323] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] arq_binding_timeout = 300 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.101458] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] backdoor_port = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.101591] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] backdoor_socket = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.101759] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] block_device_allocate_retries = 60 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.101932] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] block_device_allocate_retries_interval = 3 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.102166] env[63538]: DEBUG 
oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cert = self.pem {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.102370] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.102549] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute_monitors = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.102723] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] config_dir = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.102899] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] config_drive_format = iso9660 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.103054] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.103233] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] config_source = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.103403] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] console_host = devstack {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.103573] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] control_exchange = nova {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.103740] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cpu_allocation_ratio = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.103906] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] daemon = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.104091] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] debug = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.104259] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] default_access_ip_network_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.104428] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] default_availability_zone = nova {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.104588] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] default_ephemeral_format = 
None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.104752] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] default_green_pool_size = 1000 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.104993] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.105185] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] default_schedule_zone = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.105350] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] disk_allocation_ratio = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.105513] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] enable_new_services = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.105698] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] enabled_apis = ['osapi_compute'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.105871] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] enabled_ssl_apis = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.106049] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] flat_injected = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.106219] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] force_config_drive = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.106382] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] force_raw_images = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.106558] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] graceful_shutdown_timeout = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.106721] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] heal_instance_info_cache_interval = 60 {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.106946] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] host = cpu-1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.107143] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.107317] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] initial_disk_allocation_ratio = 1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.107484] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] initial_ram_allocation_ratio = 1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.107699] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.107866] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] instance_build_timeout = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.108044] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] instance_delete_interval = 300 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.108217] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] instance_format = [instance: %(uuid)s] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.108385] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] instance_name_template = instance-%08x {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.108549] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] instance_usage_audit = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.108724] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] instance_usage_audit_period = month {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.108926] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.109123] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] instances_path = /opt/stack/data/nova/instances {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.109378] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] internal_service_availability_zone = internal {{(pid=63538) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.109455] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] key = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.109617] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] live_migration_retry_count = 30 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.109788] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] log_color = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.109957] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] log_config_append = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.110163] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.110333] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] log_dir = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.110496] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] log_file = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.110629] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] log_options = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.110825] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] log_rotate_interval = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.111024] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] log_rotate_interval_type = days {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.111204] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] log_rotation_type = none {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.111338] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.111469] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.111644] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.111818] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.111950] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.112129] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] long_rpc_timeout = 1800 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.112293] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] max_concurrent_builds = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.112454] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] max_concurrent_live_migrations = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.112612] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] max_concurrent_snapshots = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.112771] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] max_local_block_devices = 3 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.112933] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] max_logfile_count = 30 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.113110] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] max_logfile_size_mb = 200 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.113275] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] maximum_instance_delete_attempts = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.113446] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] metadata_listen = 0.0.0.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.113615] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] metadata_listen_port = 8775 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.113821] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] metadata_workers = 2 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.113992] env[63538]: DEBUG oslo_service.service 
[None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] migrate_max_retries = -1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.114179] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] mkisofs_cmd = genisoimage {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.114393] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] my_block_storage_ip = 10.180.1.21 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.114529] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] my_ip = 10.180.1.21 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.114693] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] network_allocate_retries = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.114876] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.115060] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] osapi_compute_listen = 0.0.0.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.115230] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] osapi_compute_listen_port = 8774 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.115401] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] osapi_compute_unique_server_name_scope = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.115570] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] osapi_compute_workers = 2 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.115736] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] password_length = 12 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.115959] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] periodic_enable = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.116079] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] periodic_fuzzy_delay = 60 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.116258] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] pointer_model = usbtablet {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.116430] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] preallocate_images = none {{(pid=63538) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.116591] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] publish_errors = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.116722] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] pybasedir = /opt/stack/nova {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.116880] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ram_allocation_ratio = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.117061] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] rate_limit_burst = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.117236] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] rate_limit_except_level = CRITICAL {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.117398] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] rate_limit_interval = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.117557] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] reboot_timeout = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.117717] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] reclaim_instance_interval = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.117875] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] record = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.118061] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] reimage_timeout_per_gb = 60 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.118235] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] report_interval = 120 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.118398] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] rescue_timeout = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.118560] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] reserved_host_cpus = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.118725] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] reserved_host_disk_mb = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.118918] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 
None None] reserved_host_memory_mb = 512 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.119100] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] reserved_huge_pages = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.119270] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] resize_confirm_window = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.119438] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] resize_fs_using_block_device = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.119595] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] resume_guests_state_on_host_boot = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.119767] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.119934] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] rpc_response_timeout = 60 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.120119] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] run_external_periodic_tasks = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.120296] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] running_deleted_instance_action = reap {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.120459] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] running_deleted_instance_poll_interval = 1800 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.120620] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] running_deleted_instance_timeout = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.120800] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] scheduler_instance_sync_interval = 120 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.120986] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] service_down_time = 720 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.121178] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] servicegroup_driver = db {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.121339] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] shell_completion = None {{(pid=63538) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.121505] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] shelved_offload_time = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.121668] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] shelved_poll_interval = 3600 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.121873] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] shutdown_timeout = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.122069] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] source_is_ipv6 = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.122241] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ssl_only = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.122500] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.122672] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] sync_power_state_interval = 600 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.122838] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] sync_power_state_pool_size = 1000 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.123022] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] syslog_log_facility = LOG_USER {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.123188] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] tempdir = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.123351] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] timeout_nbd = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.123521] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] transport_url = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.123683] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] update_resources_interval = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.123846] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] use_cow_images = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.124020] env[63538]: DEBUG oslo_service.service [None 
req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] use_eventlog = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.124191] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] use_journal = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.124353] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] use_json = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.124516] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] use_rootwrap_daemon = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.124680] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] use_stderr = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.124846] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] use_syslog = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.125020] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vcpu_pin_set = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.125197] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plugging_is_fatal = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.125370] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plugging_timeout = 300 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.125541] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] virt_mkfs = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.125704] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] volume_usage_poll_interval = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.125889] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] watch_log_file = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.126091] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] web = /usr/share/spice-html5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 511.126286] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_concurrency.disable_process_locking = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.126578] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.126765] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.126939] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.127129] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.127305] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.127475] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.127661] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.auth_strategy = keystone {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.127833] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.compute_link_prefix = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.128027] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.128215] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.dhcp_domain = novalocal {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.128392] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.enable_instance_password = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.128562] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.glance_link_prefix = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.128752] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.128957] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.129152] env[63538]: 
DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.instance_list_per_project_cells = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.129324] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.list_records_by_skipping_down_cells = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.129494] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.local_metadata_per_cell = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.129670] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.max_limit = 1000 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.129846] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.metadata_cache_expiration = 15 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.130042] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.neutron_default_tenant_id = default {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.130224] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.response_validation = warn {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.130395] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.use_neutron_default_nets = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.130568] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.130742] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.130944] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.131143] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.131328] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.vendordata_dynamic_targets = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.131497] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.vendordata_jsonfile_path = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.131686] env[63538]: 
DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.131886] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.backend = dogpile.cache.memcached {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.132074] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.backend_argument = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.132255] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.config_prefix = cache.oslo {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.132431] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.dead_timeout = 60.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.132599] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.debug_cache_backend = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.132765] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.enable_retry_client = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.132931] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.enable_socket_keepalive = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.133121] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.enabled = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.133291] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.enforce_fips_mode = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.133458] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.expiration_time = 600 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.133623] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.hashclient_retry_attempts = 2 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.133792] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.hashclient_retry_delay = 1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.133960] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.memcache_dead_retry = 300 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.134141] env[63538]: DEBUG oslo_service.service [None 
req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.memcache_password = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.134311] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.134476] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.134641] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.memcache_pool_maxsize = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.134806] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.134973] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.memcache_sasl_enabled = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.135171] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.135343] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.memcache_socket_timeout = 1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.135505] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.memcache_username = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.135672] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.proxies = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.135837] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.redis_db = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.136007] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.redis_password = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.136190] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.redis_sentinel_service_name = mymaster {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.136368] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.136537] env[63538]: DEBUG oslo_service.service [None 
req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.redis_server = localhost:6379 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.136704] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.redis_socket_timeout = 1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.136866] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.redis_username = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.137052] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.retry_attempts = 2 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.137228] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.retry_delay = 0.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.137410] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.socket_keepalive_count = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.137604] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.socket_keepalive_idle = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.137775] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.socket_keepalive_interval = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.137940] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.tls_allowed_ciphers = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.138117] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.tls_cafile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.138282] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.tls_certfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.138446] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.tls_enabled = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.138606] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cache.tls_keyfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.138780] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.auth_section = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.138960] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.auth_type = password {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.139139] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.cafile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.139318] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.catalog_info = volumev3::publicURL {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.139481] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.certfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.139648] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.collect_timing = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.139814] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.cross_az_attach = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.139980] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.debug = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.140158] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.endpoint_template = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.140327] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.http_retries = 3 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.140493] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.insecure = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.140656] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.keyfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.140861] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.os_region_name = RegionOne {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.141049] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.split_loggers = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.141219] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cinder.timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.141395] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.141557] env[63538]: DEBUG 
oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute.cpu_dedicated_set = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.141751] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute.cpu_shared_set = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.141908] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute.image_type_exclude_list = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.142104] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.142275] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute.max_concurrent_disk_ops = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.142439] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute.max_disk_devices_to_attach = -1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.142604] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.142775] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.142944] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute.resource_provider_association_refresh = 300 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.143121] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.143290] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute.shutdown_retry_interval = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.143472] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.143654] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] conductor.workers = 2 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.143834] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] console.allowed_origins = [] {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.144008] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] console.ssl_ciphers = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.144190] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] console.ssl_minimum_version = default {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.144364] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] consoleauth.enforce_session_timeout = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.144536] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] consoleauth.token_ttl = 600 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.144713] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.cafile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.144875] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.certfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.145056] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.collect_timing = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.145225] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.connect_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.145388] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.connect_retry_delay = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.145551] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.endpoint_override = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.145717] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.insecure = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.145880] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.keyfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.146057] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.max_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.146226] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.min_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.146384] env[63538]: DEBUG 
oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.region_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.146546] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.retriable_status_codes = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.146707] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.service_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.146880] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.service_type = accelerator {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.147061] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.split_loggers = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.147231] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.status_code_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.147393] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.status_code_retry_delay = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.147555] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.147739] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.147904] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] cyborg.version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.148099] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.backend = sqlalchemy {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.148277] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.connection = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.148447] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.connection_debug = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.148618] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.connection_parameters = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.148807] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] 
database.connection_recycle_time = 3600 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.148995] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.connection_trace = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.149183] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.db_inc_retry_interval = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.149351] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.db_max_retries = 20 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.149520] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.db_max_retry_interval = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.149685] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.db_retry_interval = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.149850] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.max_overflow = 50 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.150027] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.max_pool_size = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.150199] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.max_retries = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.150371] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.150534] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.mysql_wsrep_sync_wait = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.150694] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.pool_timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.150899] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.retry_interval = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.151078] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.slave_connection = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.151253] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.sqlite_synchronous = True {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.151417] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] database.use_db_reconnect = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.151597] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.backend = sqlalchemy {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.151780] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.connection = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.151972] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.connection_debug = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.152169] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.connection_parameters = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.152340] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.connection_recycle_time = 3600 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.152508] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.connection_trace = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.152676] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.db_inc_retry_interval = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.152842] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.db_max_retries = 20 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.153019] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.db_max_retry_interval = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.153216] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.db_retry_interval = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.153390] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.max_overflow = 50 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.153555] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.max_pool_size = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.153721] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.max_retries = 10 {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.153893] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.154084] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.154252] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.pool_timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.154419] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.retry_interval = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.154579] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.slave_connection = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.154745] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] api_database.sqlite_synchronous = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.154924] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] devices.enabled_mdev_types = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.155124] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.155301] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ephemeral_storage_encryption.default_format = luks {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.155467] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ephemeral_storage_encryption.enabled = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.155632] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.155807] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.api_servers = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.155974] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.cafile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.156154] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.certfile = None {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.156321] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.collect_timing = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.156483] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.connect_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.156644] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.connect_retry_delay = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.156811] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.debug = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.156982] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.default_trusted_certificate_ids = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.157162] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.enable_certificate_validation = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.157330] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.enable_rbd_download = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.157490] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.endpoint_override = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.157658] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.insecure = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.157822] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.keyfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.157986] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.max_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.158165] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.min_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.158332] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.num_retries = 3 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.158505] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.rbd_ceph_conf = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.158672] env[63538]: DEBUG 
oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.rbd_connect_timeout = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.158867] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.rbd_pool = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.159060] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.rbd_user = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.159229] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.region_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.159397] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.retriable_status_codes = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.159562] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.service_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.159736] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.service_type = image {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.159907] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.split_loggers = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.160085] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.status_code_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.160255] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.status_code_retry_delay = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.160419] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.160611] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.160799] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.verify_glance_signatures = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.160974] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] glance.version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.161164] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] guestfs.debug = False 
{{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.161338] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] mks.enabled = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.161716] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.161911] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] image_cache.manager_interval = 2400 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.162109] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] image_cache.precache_concurrency = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.162293] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] image_cache.remove_unused_base_images = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.162469] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.162643] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.162825] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] image_cache.subdirectory_name = _base {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.163013] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.api_max_retries = 60 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.163195] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.api_retry_interval = 2 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.163360] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.auth_section = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.163526] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.auth_type = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.163689] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.cafile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.163853] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.certfile = None {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.164033] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.collect_timing = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.164207] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.conductor_group = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.164371] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.connect_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.164536] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.connect_retry_delay = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.164698] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.endpoint_override = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.164864] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.insecure = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.165041] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.keyfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.165207] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.max_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.165369] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.min_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.165538] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.peer_list = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.165701] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.region_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.165863] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.retriable_status_codes = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.166045] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.serial_console_state_timeout = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.166214] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.service_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.166389] env[63538]: DEBUG 
oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.service_type = baremetal {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.166552] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.shard = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.166721] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.split_loggers = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.166882] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.status_code_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.167056] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.status_code_retry_delay = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.167223] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.167406] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.167568] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ironic.version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.167751] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.167933] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] key_manager.fixed_key = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.168130] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.168297] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.barbican_api_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.168458] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.barbican_endpoint = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.168630] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.barbican_endpoint_type = public {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.168825] env[63538]: DEBUG oslo_service.service [None 
req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.barbican_region_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.169022] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.cafile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.169192] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.certfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.169358] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.collect_timing = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.169522] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.insecure = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.169682] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.keyfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.169845] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.number_of_retries = 60 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.170019] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.retry_delay = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.170191] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.send_service_user_token = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.170356] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.split_loggers = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.170514] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.170678] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.verify_ssl = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.170865] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican.verify_ssl_path = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.171065] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican_service_user.auth_section = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.171297] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican_service_user.auth_type = None {{(pid=63538) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.171477] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican_service_user.cafile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.171642] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican_service_user.certfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.171833] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican_service_user.collect_timing = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.172027] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican_service_user.insecure = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.172198] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican_service_user.keyfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.172368] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican_service_user.split_loggers = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.172529] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] barbican_service_user.timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.172701] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vault.approle_role_id = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.172865] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vault.approle_secret_id = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.173055] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vault.kv_mountpoint = secret {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.173222] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vault.kv_path = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.173390] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vault.kv_version = 2 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.173551] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vault.namespace = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.173710] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vault.root_token_id = **** {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.173870] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vault.ssl_ca_crt_file = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.174056] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vault.timeout = 60.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.174228] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vault.use_ssl = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.174400] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.174575] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.auth_section = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.174739] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.auth_type = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.174903] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.cafile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.175077] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.certfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.175247] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.collect_timing = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.175408] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.connect_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.175568] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.connect_retry_delay = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.175729] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.endpoint_override = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.175912] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.insecure = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.176101] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.keyfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.176270] env[63538]: DEBUG oslo_service.service 
[None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.max_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.176430] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.min_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.176592] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.region_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.176755] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.retriable_status_codes = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.176917] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.service_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.177104] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.service_type = identity {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.177274] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.split_loggers = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.177437] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.status_code_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.177601] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.status_code_retry_delay = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.177764] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.177951] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.178129] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] keystone.version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.178337] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.connection_uri = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.178503] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.cpu_mode = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.178676] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.cpu_model_extra_flags = 
[] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.178862] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.cpu_models = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.179050] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.cpu_power_governor_high = performance {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.179229] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.cpu_power_governor_low = powersave {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.179399] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.cpu_power_management = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.179572] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.179737] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.device_detach_attempts = 8 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.179904] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.device_detach_timeout = 20 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.180085] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.disk_cachemodes = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.180250] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.disk_prefix = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.180418] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.enabled_perf_events = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.180583] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.file_backed_memory = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.180776] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.gid_maps = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.180952] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.hw_disk_discard = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.181146] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.hw_machine_type = None {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.181324] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.images_rbd_ceph_conf = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.181490] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.181656] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.181854] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.images_rbd_glance_store_name = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.182056] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.images_rbd_pool = rbd {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.182240] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.images_type = default {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.182405] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.images_volume_group = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.182568] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.inject_key = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.182734] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.inject_partition = -2 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.182898] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.inject_password = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.183079] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.iscsi_iface = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.183248] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.iser_use_multipath = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.183414] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.live_migration_bandwidth = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.183579] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.183743] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.live_migration_downtime = 500 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.183908] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.184083] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.184253] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.live_migration_inbound_addr = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.184416] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.184579] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.live_migration_permit_post_copy = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.184744] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.live_migration_scheme = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.184919] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.live_migration_timeout_action = abort {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.185102] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.live_migration_tunnelled = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.185275] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.live_migration_uri = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.185444] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.live_migration_with_native_tls = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.185607] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.max_queues = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.185774] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.186029] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] 
libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.186204] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.nfs_mount_options = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.186936] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.187178] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.187364] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.num_iser_scan_tries = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.187536] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.num_memory_encrypted_guests = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.187707] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.187894] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.num_pcie_ports = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.188101] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.num_volume_scan_tries = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.188281] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.pmem_namespaces = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.188448] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.quobyte_client_cfg = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.188758] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.188939] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.rbd_connect_timeout = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.189123] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.189295] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 
None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.189461] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.rbd_secret_uuid = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.189621] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.rbd_user = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.189788] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.189965] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.remote_filesystem_transport = ssh {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.190165] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.rescue_image_id = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.190338] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.rescue_kernel_id = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.190500] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.rescue_ramdisk_id = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.190676] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.190874] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.rx_queue_size = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.191071] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.smbfs_mount_options = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.191366] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.191545] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.snapshot_compression = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.191714] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.snapshot_image_format = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.191970] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] 
libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.192166] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.sparse_logical_volumes = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.192339] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.swtpm_enabled = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.192517] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.swtpm_group = tss {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.192692] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.swtpm_user = tss {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.192868] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.sysinfo_serial = unique {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.193064] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.tb_cache_size = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.193250] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.tx_queue_size = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.193421] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.uid_maps = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.193589] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.use_virtio_for_bridges = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.193764] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.virt_type = kvm {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.193943] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.volume_clear = zero {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.194125] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.volume_clear_size = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.194323] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.volume_use_multipath = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.194521] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.vzstorage_cache_path = None {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.194702] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.194877] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.vzstorage_mount_group = qemu {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.195063] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.vzstorage_mount_opts = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.195241] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.195522] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.195711] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.vzstorage_mount_user = stack {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.195884] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.196080] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.auth_section = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.196264] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.auth_type = password {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.196431] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.cafile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.196597] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.certfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.196767] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.collect_timing = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.196934] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.connect_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.197115] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.connect_retry_delay = None {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.197292] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.default_floating_pool = public {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.197459] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.endpoint_override = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.197633] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.extension_sync_interval = 600 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.197795] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.http_retries = 3 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.197965] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.insecure = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.198163] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.keyfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.198335] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.max_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.198514] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.198679] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.min_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.198851] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.ovs_bridge = br-int {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.199043] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.physnets = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.199240] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.region_name = RegionOne {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.199408] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.retriable_status_codes = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.199585] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.service_metadata_proxy = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.199749] 
env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.service_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.199926] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.service_type = network {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.200109] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.split_loggers = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.200276] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.status_code_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.200439] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.status_code_retry_delay = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.200604] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.200816] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.200994] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] neutron.version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.201190] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] notifications.bdms_in_notifications = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.201373] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] notifications.default_level = INFO {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.201552] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] notifications.notification_format = unversioned {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.201720] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] notifications.notify_on_state_change = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.201936] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.202137] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] pci.alias = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.202317] 
env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] pci.device_spec = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.202486] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] pci.report_in_placement = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.202662] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.auth_section = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.202839] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.auth_type = password {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.203024] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.203193] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.cafile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.203354] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.certfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.203521] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.collect_timing = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.203682] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.connect_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.203844] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.connect_retry_delay = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.204014] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.default_domain_id = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.204179] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.default_domain_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.204339] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.domain_id = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.204499] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.domain_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.204659] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 
None None] placement.endpoint_override = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.204823] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.insecure = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.204985] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.keyfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.205159] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.max_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.205320] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.min_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.205489] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.password = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.205649] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.project_domain_id = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.205817] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.project_domain_name = Default {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.205986] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.project_id = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.206175] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.project_name = service {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.206346] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.region_name = RegionOne {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.206511] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.retriable_status_codes = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.206672] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.service_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.206842] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.service_type = placement {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.207013] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.split_loggers = False {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.207181] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.status_code_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.207348] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.status_code_retry_delay = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.207509] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.system_scope = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.207669] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.207828] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.trust_id = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.207989] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.user_domain_id = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.208176] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.user_domain_name = Default {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.208340] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.user_id = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.208519] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.username = nova {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.208703] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.208867] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] placement.version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.209065] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] quota.cores = 20 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.209241] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] quota.count_usage_from_placement = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.209416] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
[ 511.209596] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] quota.injected_file_content_bytes = 10240 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.209768] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] quota.injected_file_path_length = 255 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.209940] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] quota.injected_files = 5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.210122] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] quota.instances = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.210295] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] quota.key_pairs = 100 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.210465] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] quota.metadata_items = 128 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.210634] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] quota.ram = 51200 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.210828] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] quota.recheck_quota = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.211021] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] quota.server_group_members = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.211191] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] quota.server_groups = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.211368] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.211539] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.211706] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] scheduler.image_metadata_prefilter = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.211904] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.212089] env[63538]: DEBUG oslo_service.service 
[None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] scheduler.max_attempts = 3 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.212260] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] scheduler.max_placement_results = 1000 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.212427] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.212591] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] scheduler.query_placement_for_image_type_support = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.212755] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.212934] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] scheduler.workers = 2 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.213127] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.213302] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.213498] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.213678] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.213848] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.214030] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.214205] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.214397] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.214567] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.host_subset_size = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.214736] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.214898] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.215088] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.215263] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.isolated_hosts = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.215436] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.isolated_images = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.215602] env[63538]: DEBUG oslo_service.service [None 
req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.215764] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.215933] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.216113] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.pci_in_placement = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.216281] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.216446] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.216608] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.216770] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.216937] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225260] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225260] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.track_instance_changes = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225260] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225260] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] metrics.required = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225260] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] metrics.weight_multiplier = 1.0 
{{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225260] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225260] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] metrics.weight_setting = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225565] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225565] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] serial_console.enabled = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225565] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] serial_console.port_range = 10000:20000 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225565] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225565] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225565] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] serial_console.serialproxy_port = 6083 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225565] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] service_user.auth_section = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225778] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] service_user.auth_type = password {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225778] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] service_user.cafile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225778] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] service_user.certfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225778] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] service_user.collect_timing = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225778] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] service_user.insecure = False {{(pid=63538) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225778] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] service_user.keyfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225778] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] service_user.send_service_user_token = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225990] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] service_user.split_loggers = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225990] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] service_user.timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225990] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] spice.agent_enabled = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225990] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] spice.enabled = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225990] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225990] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.225990] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] spice.html5proxy_port = 6082 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226229] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] spice.image_compression = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226229] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] spice.jpeg_compression = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226229] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] spice.playback_compression = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226229] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] spice.require_secure = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226229] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] spice.server_listen = 127.0.0.1 {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226229] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226229] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] spice.streaming_mode = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226435] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] spice.zlib_compression = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226435] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] upgrade_levels.baseapi = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226435] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] upgrade_levels.compute = auto {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226435] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] upgrade_levels.conductor = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226435] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] upgrade_levels.scheduler = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226435] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vendordata_dynamic_auth.auth_section = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226435] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vendordata_dynamic_auth.auth_type = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226654] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vendordata_dynamic_auth.cafile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226654] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vendordata_dynamic_auth.certfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226654] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226654] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vendordata_dynamic_auth.insecure = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226654] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vendordata_dynamic_auth.keyfile = None {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226654] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226654] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vendordata_dynamic_auth.timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226871] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.api_retry_count = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226871] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.ca_file = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226871] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.cache_prefix = devstack-image-cache {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226871] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.cluster_name = testcl1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226871] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.connection_pool_size = 10 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226871] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.console_delay_seconds = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.226871] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.datastore_regex = ^datastore.* {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.227097] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.227097] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.host_password = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.227161] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.host_port = 443 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.228199] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.host_username = administrator@vsphere.local {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.228199] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.insecure = True {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.228199] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.integration_bridge = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.228199] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.maximum_objects = 100 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.228199] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.pbm_default_policy = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.228199] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.pbm_enabled = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.228494] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.pbm_wsdl_location = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.228494] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.228637] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.serial_port_proxy_uri = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.228737] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.serial_port_service_uri = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.229107] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.task_poll_interval = 0.5 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.229107] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.use_linked_clone = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.229257] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.vnc_keymap = en-us {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.229420] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.vnc_port = 5900 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.229579] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vmware.vnc_port_total = 10000 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.230581] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vnc.auth_schemes = ['none'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.230581] 
env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vnc.enabled = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.230581] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.230581] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.230832] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vnc.novncproxy_port = 6080 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.230832] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vnc.server_listen = 127.0.0.1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.231033] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.231211] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vnc.vencrypt_ca_certs = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.231375] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vnc.vencrypt_client_cert = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.231537] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vnc.vencrypt_client_key = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.231721] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.231906] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.disable_deep_image_inspection = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.232108] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.232279] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.232447] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.232612] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.disable_rootwrap = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.232773] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.enable_numa_live_migration = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.232940] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.233116] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.233282] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.233445] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.libvirt_disable_apic = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.233609] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.233778] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.233946] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.234124] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.234291] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.234456] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.234619] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.234784] 
env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.234949] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.235135] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.235328] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.235504] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] wsgi.client_socket_timeout = 900 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.235674] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] wsgi.default_pool_size = 1000 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.235845] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] wsgi.keep_alive = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.236032] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] wsgi.max_header_line = 16384 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.236207] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] wsgi.secure_proxy_ssl_header = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.236371] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] wsgi.ssl_ca_file = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.236534] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] wsgi.ssl_cert_file = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.236695] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] wsgi.ssl_key_file = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.236861] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] wsgi.tcp_keepidle = 600 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.237057] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.237233] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] zvm.ca_file = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.237397] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] zvm.cloud_connector_url = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.237713] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.237891] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] zvm.reachable_timeout = 300 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.238093] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_policy.enforce_new_defaults = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.238505] env[63538]: WARNING oslo_config.cfg [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
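The long runs of DEBUG records on either side of this point are oslo.config dumping every registered option at nova-compute startup; the source reference embedded in each record ({{(pid=63538) log_opt_values .../oslo_config/cfg.py:2826}}) points at ConfigOpts.log_opt_values, which also masks options registered as secret (e.g. vmware.host_password and profiler.hmac_keys appear as ****). The following is a minimal, self-contained sketch of that mechanism only; the option names, group, and defaults in it are illustrative placeholders, not Nova's real option set or its startup code.

```python
# Minimal sketch: reproduce the "group.option = value" dump format shown in
# this log using oslo.config's ConfigOpts.log_opt_values(). Option names here
# are hypothetical examples, not Nova's registered options.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF = cfg.ConfigOpts()
CONF.register_opts(
    [
        cfg.StrOpt('host_ip', default='vc.example.test'),
        # Options registered with secret=True are printed as **** by
        # log_opt_values(), matching the masked values in the log above.
        cfg.StrOpt('host_password', secret=True),
    ],
    group='example_group',
)

CONF([])  # parse with no command-line args and no config files
CONF.log_opt_values(LOG, logging.DEBUG)
```

Running the sketch emits DEBUG lines of the same "example_group.host_ip = ..." shape seen throughout this section, one per registered option.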
[ 511.238698] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_policy.enforce_scope = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.238894] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_policy.policy_default_rule = default {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.239104] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.239289] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_policy.policy_file = policy.yaml {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.239470] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.239639] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.239805] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.239979] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.240157] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.240334] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.240514] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.240724] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler.connection_string = messaging:// {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.240893] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler.enabled = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.241094] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler.es_doc_type = notification 
{{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.241271] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler.es_scroll_size = 10000 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.241447] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler.es_scroll_time = 2m {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.241614] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler.filter_error_trace = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.241790] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler.hmac_keys = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.241978] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler.sentinel_service_name = mymaster {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.242170] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler.socket_timeout = 0.1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.242343] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler.trace_requests = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.242511] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler.trace_sqlalchemy = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.242699] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler_jaeger.process_tags = {} {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.242863] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler_jaeger.service_name_prefix = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.243045] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] profiler_otlp.service_name_prefix = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.243273] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] remote_debug.host = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.243454] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] remote_debug.port = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.243642] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.243814] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.243986] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.244172] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.244344] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.244509] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.244675] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.244840] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.245040] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.245209] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.245377] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.245553] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.245726] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.245902] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.246091] 
env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.246268] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.246434] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.246611] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.246778] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.246942] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.247122] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.247289] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.247452] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.247620] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.247789] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.247960] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.248137] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.248303] env[63538]: DEBUG oslo_service.service [None 
req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.248475] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.248646] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.ssl = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.248823] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.248996] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.249179] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.249352] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.249524] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.ssl_version = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.249689] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.249886] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.250072] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_notifications.retry = -1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.250272] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.250450] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_messaging_notifications.transport_url = **** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.250627] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.auth_section = None {{(pid=63538) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.250894] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.auth_type = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.251013] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.cafile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.251190] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.certfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.251361] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.collect_timing = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.251532] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.connect_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.251692] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.connect_retry_delay = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.251910] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.endpoint_id = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.252112] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.endpoint_override = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.252290] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.insecure = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.252454] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.keyfile = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.252616] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.max_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.252777] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.min_version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.252944] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.region_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.253150] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.retriable_status_codes = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.253330] 
env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.service_name = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.253495] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.service_type = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.253663] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.split_loggers = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.253825] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.status_code_retries = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.253991] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.status_code_retry_delay = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.254173] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.timeout = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.254336] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.valid_interfaces = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.254497] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_limit.version = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.254667] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_reports.file_event_handler = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.254838] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.255011] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] oslo_reports.log_dir = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.255290] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.255476] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.255643] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.255817] 
env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.256015] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.256238] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.256424] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.256594] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plug_ovs_privileged.group = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.256758] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.256931] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.257114] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.257281] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] vif_plug_ovs_privileged.user = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.257459] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_linux_bridge.flat_interface = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.257644] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.257823] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.257999] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.258189] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.258368] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.258543] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.258709] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.258938] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.259082] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_ovs.isolate_vif = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.259265] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.259435] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.259610] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.259788] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_ovs.ovsdb_interface = native {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.259954] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_vif_ovs.per_port_bridge = False {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.260144] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_brick.lock_path = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.260318] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.260486] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.260659] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None 
None] privsep_osbrick.capabilities = [21] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.260876] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] privsep_osbrick.group = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.261070] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] privsep_osbrick.helper_command = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.261248] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.261417] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.261580] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] privsep_osbrick.user = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.261781] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.261983] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] nova_sys_admin.group = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.262171] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] nova_sys_admin.helper_command = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.262346] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.262514] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.262677] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] nova_sys_admin.user = None {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 511.262813] env[63538]: DEBUG oslo_service.service [None req-fb6d3c4d-031c-4d23-b270-64b7f61cfdf4 None None] ******************************************************************************** {{(pid=63538) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 511.263369] env[63538]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 511.767878] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Getting list of instances from cluster (obj){ [ 511.767878] env[63538]: value = "domain-c8" [ 511.767878] env[63538]: 
_type = "ClusterComputeResource" [ 511.767878] env[63538]: } {{(pid=63538) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 511.769192] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097b6576-148f-42c8-a648-2584692e1666 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.778646] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Got total of 0 instances {{(pid=63538) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 511.779155] env[63538]: WARNING nova.virt.vmwareapi.driver [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 511.779684] env[63538]: INFO nova.virt.node [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Generated node identity f65218a4-1d3d-476a-9093-01cae92c8635 [ 511.779940] env[63538]: INFO nova.virt.node [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Wrote node identity f65218a4-1d3d-476a-9093-01cae92c8635 to /opt/stack/data/n-cpu-1/compute_id [ 512.283234] env[63538]: WARNING nova.compute.manager [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Compute nodes ['f65218a4-1d3d-476a-9093-01cae92c8635'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 513.290190] env[63538]: INFO nova.compute.manager [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 514.295694] env[63538]: WARNING nova.compute.manager [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 514.296079] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.296180] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.296337] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 514.296521] env[63538]: DEBUG nova.compute.resource_tracker [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63538) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 514.297511] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e5c6a9-e2e7-48e1-8de8-0cf2aaf942c0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.307173] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7a8e15-3f7c-452a-8853-30bdc2ea420d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.321237] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c11d50b-1728-49ff-8225-3bfd7cbbaba9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.328077] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c182ea-d31d-4494-ade5-56e7dbc29097 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.356767] env[63538]: DEBUG nova.compute.resource_tracker [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180536MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=63538) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 514.356944] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.357590] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.859799] env[63538]: WARNING 
nova.compute.resource_tracker [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] No compute node record for cpu-1:f65218a4-1d3d-476a-9093-01cae92c8635: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host f65218a4-1d3d-476a-9093-01cae92c8635 could not be found. [ 515.363978] env[63538]: INFO nova.compute.resource_tracker [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: f65218a4-1d3d-476a-9093-01cae92c8635 [ 516.871536] env[63538]: DEBUG nova.compute.resource_tracker [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 516.871960] env[63538]: DEBUG nova.compute.resource_tracker [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=100GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] stats={'failed_builds': '0'} {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 517.026687] env[63538]: INFO nova.scheduler.client.report [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] [req-e75bb501-b49a-4038-93be-aca6979a225b] Created resource provider record via placement API for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 517.044031] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b05a05c-4e73-41b7-ba53-f2f8a145ca6f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.052732] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb17d1e4-3799-4553-a579-06db140508f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.081809] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9da71d6-275b-45c5-9a14-fa7e0d145e07 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.089480] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e6c63a-d942-4826-8c1b-4d69b976e67b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.102533] env[63538]: DEBUG nova.compute.provider_tree [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 517.639138] env[63538]: DEBUG nova.scheduler.client.report [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 0 in Placement from set_inventory_for_provider using 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 517.639370] env[63538]: DEBUG nova.compute.provider_tree [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 0 to 1 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 517.639540] env[63538]: DEBUG nova.compute.provider_tree [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 517.691042] env[63538]: DEBUG nova.compute.provider_tree [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 1 to 2 during operation: update_traits {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 518.195975] env[63538]: DEBUG nova.compute.resource_tracker [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63538) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 518.196330] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.839s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 518.196451] env[63538]: DEBUG nova.service [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Creating RPC server for service compute {{(pid=63538) start /opt/stack/nova/nova/service.py:186}} [ 518.209750] env[63538]: DEBUG nova.service [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] Join ServiceGroup membership for this service compute {{(pid=63538) start /opt/stack/nova/nova/service.py:203}} [ 518.209973] env[63538]: DEBUG nova.servicegroup.drivers.db [None req-b63dbe11-31c1-4e55-804a-137efcd07031 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=63538) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 559.323024] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Acquiring lock "174368d1-9910-495b-a923-842e0440fd01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.323024] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Lock "174368d1-9910-495b-a923-842e0440fd01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.826945] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Acquiring lock "15a8424e-27a6-4b77-b57c-d163345b8fed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.827193] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Lock "15a8424e-27a6-4b77-b57c-d163345b8fed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.831790] env[63538]: DEBUG nova.compute.manager [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 560.333153] env[63538]: DEBUG nova.compute.manager [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 560.371026] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.372215] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.372945] env[63538]: INFO nova.compute.claims [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 560.714648] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquiring lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.714912] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.870834] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.016154] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Acquiring lock "e1710498-0616-4862-afc0-6e452dc19882" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.016405] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Lock "e1710498-0616-4862-afc0-6e452dc19882" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.219024] env[63538]: DEBUG nova.compute.manager [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 561.518713] env[63538]: DEBUG nova.compute.manager [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 561.537316] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26975e8-fd67-4b1c-89ef-9f696c518288 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.553466] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f179f49e-dc27-4e51-b096-634901cc8a76 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.596631] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745ec7a7-ff2b-42f8-836a-8997873f77d3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.605595] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b659ad2-0b9d-462c-86ee-33b8103fbaed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.626194] env[63538]: DEBUG nova.compute.provider_tree [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.659194] env[63538]: DEBUG oslo_concurrency.lockutils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Acquiring lock "d99b7b8e-633f-4fba-bce6-9b8e9e9892d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.660905] env[63538]: DEBUG oslo_concurrency.lockutils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Lock "d99b7b8e-633f-4fba-bce6-9b8e9e9892d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.756169] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.064590] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.132610] env[63538]: DEBUG nova.scheduler.client.report [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 562.166687] env[63538]: DEBUG nova.compute.manager [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 562.641592] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.270s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.642229] env[63538]: DEBUG nova.compute.manager [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 562.644880] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.774s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.646654] env[63538]: INFO nova.compute.claims [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 562.695759] env[63538]: DEBUG oslo_concurrency.lockutils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.123823] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.124187] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.153021] env[63538]: DEBUG nova.compute.utils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 563.153021] env[63538]: DEBUG nova.compute.manager [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 563.153021] env[63538]: DEBUG nova.network.neutron [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 563.303276] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Acquiring lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.303676] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.626841] env[63538]: DEBUG nova.compute.manager [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 563.660762] env[63538]: DEBUG nova.compute.manager [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 563.794654] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c9b15b-cf24-455d-95f0-bfb5ce2fe8bd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.803244] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f00935-ba9f-434c-b0cc-66f0a1526e3a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.807787] env[63538]: DEBUG nova.compute.manager [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 563.845805] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a17097c-b018-42e8-84ae-a383f2979798 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.855709] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920ea34f-d9ac-44f4-9cb3-042fa9f79e95 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.871410] env[63538]: DEBUG nova.compute.provider_tree [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.905777] env[63538]: DEBUG nova.policy [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13f1241e50ed4d5699a67d206c4e694e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2488493adc8b48d29e615ebcb8a5935e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 564.159495] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.339715] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.376112] env[63538]: DEBUG nova.scheduler.client.report [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 564.464487] env[63538]: DEBUG nova.network.neutron [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 
tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Successfully created port: 51784c09-2e46-4add-9f20-a0a9563f7eaf {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 564.597134] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Acquiring lock "e3ba860b-afb8-4843-9d99-049dce205f9f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.597363] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Lock "e3ba860b-afb8-4843-9d99-049dce205f9f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.678226] env[63538]: DEBUG nova.compute.manager [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 564.743523] env[63538]: DEBUG nova.virt.hardware [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 564.743523] env[63538]: DEBUG nova.virt.hardware [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 564.743523] env[63538]: DEBUG nova.virt.hardware [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 564.743761] env[63538]: DEBUG nova.virt.hardware [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 
tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 564.743761] env[63538]: DEBUG nova.virt.hardware [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 564.743761] env[63538]: DEBUG nova.virt.hardware [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 564.743761] env[63538]: DEBUG nova.virt.hardware [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 564.743761] env[63538]: DEBUG nova.virt.hardware [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 564.743918] env[63538]: DEBUG nova.virt.hardware [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 564.744127] env[63538]: DEBUG nova.virt.hardware [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 564.744301] env[63538]: DEBUG nova.virt.hardware [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 564.745282] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3049d51-63ab-4121-b072-1dd7f503a534 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.754460] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c568f89d-e209-4c07-ab4b-3160e6ec29f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.778987] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5917e02b-eb89-45c5-ac80-2ae3b0ede07c {{(pid=63538) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.885093] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.886271] env[63538]: DEBUG nova.compute.manager [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 564.892356] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.136s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.893964] env[63538]: INFO nova.compute.claims [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 565.100582] env[63538]: DEBUG nova.compute.manager [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 565.302943] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.303185] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.408346] env[63538]: DEBUG nova.compute.utils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 565.413024] env[63538]: DEBUG nova.compute.manager [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 565.413024] env[63538]: DEBUG nova.network.neutron [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 565.575545] env[63538]: DEBUG nova.policy [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '647702bc5cd8400285a0cbeb2fb9082b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64662e4603754bd4a9a59035b19992c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 565.654580] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.806563] env[63538]: DEBUG nova.compute.manager [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 565.921096] env[63538]: DEBUG nova.compute.manager [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 566.157155] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f091b433-d9f2-4578-b8cc-8ec76bb35b89 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.171152] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b63b1b8-ea60-4da7-bd90-02b5db905723 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.211156] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5b363f-0763-4c66-a823-2d1f39793bca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.223068] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35778f07-2203-4796-b02a-4fa2a7c53173 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.254272] env[63538]: DEBUG nova.compute.provider_tree [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.259554] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "8fb62f47-cbf2-4b46-bc33-845e832f9ef0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.259778] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "8fb62f47-cbf2-4b46-bc33-845e832f9ef0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.339568] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.432135] env[63538]: DEBUG nova.network.neutron [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] 
Successfully created port: 0fbe2e21-79c6-4b82-a23c-a25b732e78b7 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 566.761074] env[63538]: DEBUG nova.scheduler.client.report [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 566.766664] env[63538]: DEBUG nova.compute.manager [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 566.936299] env[63538]: DEBUG nova.compute.manager [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 566.980543] env[63538]: DEBUG nova.virt.hardware [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 566.980759] env[63538]: DEBUG nova.virt.hardware [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 566.980797] env[63538]: DEBUG nova.virt.hardware [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 566.981036] env[63538]: DEBUG nova.virt.hardware [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 
tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 566.981211] env[63538]: DEBUG nova.virt.hardware [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 566.981356] env[63538]: DEBUG nova.virt.hardware [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 566.981574] env[63538]: DEBUG nova.virt.hardware [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 566.981810] env[63538]: DEBUG nova.virt.hardware [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 566.981993] env[63538]: DEBUG nova.virt.hardware [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 566.982168] env[63538]: DEBUG nova.virt.hardware [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 566.982334] env[63538]: DEBUG nova.virt.hardware [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 566.983286] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8018a5-8ae5-4d7f-ab92-6a6838b39c13 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.997651] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0806a0cf-9e00-4526-900a-ee902ad08bbc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.271461] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.272028] env[63538]: DEBUG nova.compute.manager [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 567.279188] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.215s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.280642] env[63538]: INFO nova.compute.claims [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 567.310252] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.777495] env[63538]: DEBUG nova.network.neutron [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Successfully updated port: 51784c09-2e46-4add-9f20-a0a9563f7eaf {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 567.781923] env[63538]: DEBUG nova.compute.utils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 567.783259] env[63538]: DEBUG nova.compute.manager [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 567.783420] env[63538]: DEBUG nova.network.neutron [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 567.887744] env[63538]: DEBUG nova.policy [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16065b424d6244d3bb8d28d1f2aa2a60', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ab51bcca7dc40688572337d893c1b4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 568.282808] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Acquiring lock "refresh_cache-174368d1-9910-495b-a923-842e0440fd01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.283112] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Acquired lock "refresh_cache-174368d1-9910-495b-a923-842e0440fd01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.283149] env[63538]: DEBUG nova.network.neutron [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 568.293321] env[63538]: DEBUG nova.compute.manager [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 568.672400] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.672400] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.772398] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e7bf54-e23a-4ba6-a463-d7c16ebdeb17 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.782212] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147b0148-7f29-4ce3-bd68-4d1c913f7324 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.830903] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32f0e98-2637-4117-a805-7cca08ca862e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.839771] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9d95c4-7d41-46a4-a8f8-44be7edb8d03 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.863928] env[63538]: DEBUG nova.compute.provider_tree [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.879878] env[63538]: DEBUG nova.compute.manager [req-b1549eb0-ce19-4509-8e73-2c74e0d7407d req-38bc4018-55dc-4cb5-9442-9763880aa427 service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Received event network-vif-plugged-51784c09-2e46-4add-9f20-a0a9563f7eaf {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 568.879878] env[63538]: DEBUG oslo_concurrency.lockutils [req-b1549eb0-ce19-4509-8e73-2c74e0d7407d req-38bc4018-55dc-4cb5-9442-9763880aa427 service nova] Acquiring lock "174368d1-9910-495b-a923-842e0440fd01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.879878] env[63538]: DEBUG oslo_concurrency.lockutils [req-b1549eb0-ce19-4509-8e73-2c74e0d7407d req-38bc4018-55dc-4cb5-9442-9763880aa427 service nova] Lock "174368d1-9910-495b-a923-842e0440fd01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s 
{{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.879878] env[63538]: DEBUG oslo_concurrency.lockutils [req-b1549eb0-ce19-4509-8e73-2c74e0d7407d req-38bc4018-55dc-4cb5-9442-9763880aa427 service nova] Lock "174368d1-9910-495b-a923-842e0440fd01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.879878] env[63538]: DEBUG nova.compute.manager [req-b1549eb0-ce19-4509-8e73-2c74e0d7407d req-38bc4018-55dc-4cb5-9442-9763880aa427 service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] No waiting events found dispatching network-vif-plugged-51784c09-2e46-4add-9f20-a0a9563f7eaf {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 568.880282] env[63538]: WARNING nova.compute.manager [req-b1549eb0-ce19-4509-8e73-2c74e0d7407d req-38bc4018-55dc-4cb5-9442-9763880aa427 service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Received unexpected event network-vif-plugged-51784c09-2e46-4add-9f20-a0a9563f7eaf for instance with vm_state building and task_state spawning. [ 569.064144] env[63538]: DEBUG nova.network.neutron [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 569.332645] env[63538]: DEBUG nova.compute.manager [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 569.371510] env[63538]: DEBUG nova.virt.hardware [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 569.371744] env[63538]: DEBUG nova.virt.hardware [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 569.371981] env[63538]: DEBUG nova.virt.hardware [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 569.372190] env[63538]: DEBUG nova.virt.hardware [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 569.372347] env[63538]: DEBUG nova.virt.hardware [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 569.372491] env[63538]: DEBUG nova.virt.hardware [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 569.373639] env[63538]: DEBUG nova.virt.hardware [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 569.373810] env[63538]: DEBUG nova.virt.hardware [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 569.374040] env[63538]: DEBUG nova.virt.hardware [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 569.374200] env[63538]: DEBUG nova.virt.hardware [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 569.374370] env[63538]: DEBUG nova.virt.hardware [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 569.375316] env[63538]: DEBUG nova.scheduler.client.report [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 569.381175] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f26264-1609-4b19-a1b8-60cef5005c3e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.396324] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3bb422-3144-4075-8c27-2cdbbd5f3ceb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.492624] env[63538]: DEBUG nova.network.neutron [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Successfully created port: 937fc657-0b54-4a28-98fe-43139e1ba61c {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 569.646516] env[63538]: DEBUG nova.network.neutron [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Successfully updated port: 0fbe2e21-79c6-4b82-a23c-a25b732e78b7 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 569.854779] env[63538]: DEBUG nova.compute.manager [req-5409712a-8461-4736-8444-97ca48a22811 req-00f756dd-d593-496c-996d-19a6dbffc2b5 service nova] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Received event network-vif-plugged-0fbe2e21-79c6-4b82-a23c-a25b732e78b7 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 
569.855116] env[63538]: DEBUG oslo_concurrency.lockutils [req-5409712a-8461-4736-8444-97ca48a22811 req-00f756dd-d593-496c-996d-19a6dbffc2b5 service nova] Acquiring lock "15a8424e-27a6-4b77-b57c-d163345b8fed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.855407] env[63538]: DEBUG oslo_concurrency.lockutils [req-5409712a-8461-4736-8444-97ca48a22811 req-00f756dd-d593-496c-996d-19a6dbffc2b5 service nova] Lock "15a8424e-27a6-4b77-b57c-d163345b8fed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.855895] env[63538]: DEBUG oslo_concurrency.lockutils [req-5409712a-8461-4736-8444-97ca48a22811 req-00f756dd-d593-496c-996d-19a6dbffc2b5 service nova] Lock "15a8424e-27a6-4b77-b57c-d163345b8fed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.855895] env[63538]: DEBUG nova.compute.manager [req-5409712a-8461-4736-8444-97ca48a22811 req-00f756dd-d593-496c-996d-19a6dbffc2b5 service nova] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] No waiting events found dispatching network-vif-plugged-0fbe2e21-79c6-4b82-a23c-a25b732e78b7 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 569.856145] env[63538]: WARNING nova.compute.manager [req-5409712a-8461-4736-8444-97ca48a22811 req-00f756dd-d593-496c-996d-19a6dbffc2b5 service nova] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Received unexpected event network-vif-plugged-0fbe2e21-79c6-4b82-a23c-a25b732e78b7 for instance with vm_state building and task_state spawning. [ 569.890511] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.609s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.890511] env[63538]: DEBUG nova.compute.manager [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 569.893825] env[63538]: DEBUG oslo_concurrency.lockutils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.198s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.903022] env[63538]: INFO nova.compute.claims [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 569.908521] env[63538]: DEBUG nova.network.neutron [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Updating instance_info_cache with network_info: [{"id": "51784c09-2e46-4add-9f20-a0a9563f7eaf", "address": "fa:16:3e:df:3b:0a", "network": {"id": "1a29c626-75f0-4d3c-b7cf-2266e6dfe02b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-706845743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2488493adc8b48d29e615ebcb8a5935e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca401eaa-889a-4f9f-ac9a-56b4c41bfc06", "external-id": "nsx-vlan-transportzone-877", "segmentation_id": 877, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51784c09-2e", "ovs_interfaceid": "51784c09-2e46-4add-9f20-a0a9563f7eaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.152342] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Acquiring lock "refresh_cache-15a8424e-27a6-4b77-b57c-d163345b8fed" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.152342] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Acquired lock "refresh_cache-15a8424e-27a6-4b77-b57c-d163345b8fed" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.152342] env[63538]: DEBUG nova.network.neutron [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Building network info cache for instance 
{{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 570.397367] env[63538]: DEBUG nova.compute.utils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 570.397367] env[63538]: DEBUG nova.compute.manager [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 570.397367] env[63538]: DEBUG nova.network.neutron [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 570.415451] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Releasing lock "refresh_cache-174368d1-9910-495b-a923-842e0440fd01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.415886] env[63538]: DEBUG nova.compute.manager [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Instance network_info: |[{"id": "51784c09-2e46-4add-9f20-a0a9563f7eaf", "address": "fa:16:3e:df:3b:0a", "network": {"id": "1a29c626-75f0-4d3c-b7cf-2266e6dfe02b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-706845743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2488493adc8b48d29e615ebcb8a5935e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca401eaa-889a-4f9f-ac9a-56b4c41bfc06", "external-id": "nsx-vlan-transportzone-877", "segmentation_id": 877, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51784c09-2e", "ovs_interfaceid": "51784c09-2e46-4add-9f20-a0a9563f7eaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 570.418628] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:3b:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'ca401eaa-889a-4f9f-ac9a-56b4c41bfc06', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '51784c09-2e46-4add-9f20-a0a9563f7eaf', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 570.433992] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 570.437384] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e88d7b0-060b-44f8-bfd0-aaef95309549 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.450248] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Created folder: OpenStack in parent group-v4. [ 570.450455] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Creating folder: Project (2488493adc8b48d29e615ebcb8a5935e). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 570.450731] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7b8baed-d075-4fc3-b838-4e6c9b0c7f4c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.462986] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Created folder: Project (2488493adc8b48d29e615ebcb8a5935e) in parent group-v992234. [ 570.463671] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Creating folder: Instances. Parent ref: group-v992235. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 570.463671] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78779e04-f24a-4005-82fc-51ada1d585d0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.473571] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Created folder: Instances in parent group-v992235. [ 570.473831] env[63538]: DEBUG oslo.service.loopingcall [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 570.474143] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 174368d1-9910-495b-a923-842e0440fd01] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 570.474308] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-849ad4bb-08de-4e92-a5ef-21321d967c30 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.498630] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 570.498630] env[63538]: value = "task-5100308" [ 570.498630] env[63538]: _type = "Task" [ 570.498630] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.514055] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100308, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.669541] env[63538]: DEBUG nova.policy [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46f5f2f60465477e90c4508d9ea54e86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2602b69ba91f4ecca53962b19ccdedc1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 570.905598] env[63538]: DEBUG nova.compute.manager [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 570.948379] env[63538]: DEBUG nova.network.neutron [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 571.015646] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100308, 'name': CreateVM_Task, 'duration_secs': 0.38201} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.019742] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 174368d1-9910-495b-a923-842e0440fd01] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 571.072520] env[63538]: DEBUG oslo_vmware.service [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442302bd-d37c-4a3f-a119-a8c603243415 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.081282] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.083188] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.083952] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 571.084646] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6e16840-22e2-40d1-a2a7-5764f77e29f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.093564] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 571.093564] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528c8c1a-2974-c398-03b2-88266e07cc73" [ 571.093564] env[63538]: _type = "Task" [ 571.093564] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.108222] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528c8c1a-2974-c398-03b2-88266e07cc73, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.205783] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049b25d5-1ad8-468e-95e8-367eaeaca626 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.216851] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04be2e5-b553-48c1-93b6-639a57ac21a6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.270409] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bca9166-af75-4135-9c54-aa05d6d8b3ef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.281337] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82088af4-9a53-489a-8741-54e2cad38380 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.307801] env[63538]: DEBUG nova.compute.provider_tree [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.492241] env[63538]: DEBUG nova.compute.manager [req-cc08990c-af6d-4ef2-918f-633107a3d9b9 req-33450091-4a80-4d21-bb83-dcee719f9439 service nova] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Received event network-changed-0fbe2e21-79c6-4b82-a23c-a25b732e78b7 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 571.492519] env[63538]: DEBUG nova.compute.manager [req-cc08990c-af6d-4ef2-918f-633107a3d9b9 req-33450091-4a80-4d21-bb83-dcee719f9439 service nova] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Refreshing instance network info cache due to event network-changed-0fbe2e21-79c6-4b82-a23c-a25b732e78b7. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 571.492640] env[63538]: DEBUG oslo_concurrency.lockutils [req-cc08990c-af6d-4ef2-918f-633107a3d9b9 req-33450091-4a80-4d21-bb83-dcee719f9439 service nova] Acquiring lock "refresh_cache-15a8424e-27a6-4b77-b57c-d163345b8fed" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.569176] env[63538]: DEBUG nova.network.neutron [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Updating instance_info_cache with network_info: [{"id": "0fbe2e21-79c6-4b82-a23c-a25b732e78b7", "address": "fa:16:3e:4d:75:eb", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fbe2e21-79", "ovs_interfaceid": "0fbe2e21-79c6-4b82-a23c-a25b732e78b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.608503] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.608792] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 571.609019] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.609189] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.609568] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 571.609819] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ecda50e2-399c-461b-88c7-75146fce6812 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.632131] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 571.632517] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 571.633166] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14840f8-0879-41cf-9bf3-cf4b5458d25d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.642216] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09a33f14-5e2d-4461-b2c1-929b2545a23f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.650684] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 571.650684] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522c7aeb-32f4-e8bc-8168-e4dff9a0fa3b" [ 571.650684] env[63538]: _type = "Task" [ 571.650684] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.662509] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522c7aeb-32f4-e8bc-8168-e4dff9a0fa3b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.814396] env[63538]: DEBUG nova.scheduler.client.report [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 571.924487] env[63538]: DEBUG nova.compute.manager [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 571.934106] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Acquiring lock "10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.934344] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Lock "10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.970633] env[63538]: DEBUG nova.virt.hardware [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 571.971092] env[63538]: DEBUG nova.virt.hardware [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Flavor limits 0:0:0 {{(pid=63538) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 571.971208] env[63538]: DEBUG nova.virt.hardware [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 571.971368] env[63538]: DEBUG nova.virt.hardware [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 571.971513] env[63538]: DEBUG nova.virt.hardware [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 571.971703] env[63538]: DEBUG nova.virt.hardware [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 571.973147] env[63538]: DEBUG nova.virt.hardware [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 571.973147] env[63538]: DEBUG nova.virt.hardware [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 571.973147] env[63538]: DEBUG nova.virt.hardware [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 571.974369] env[63538]: DEBUG nova.virt.hardware [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 571.974369] env[63538]: DEBUG nova.virt.hardware [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 571.974902] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c91c75-dcca-43ad-9f72-f83f1acb7d99 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.987586] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f191c08-a41d-4443-8c47-5374de21dbb8 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.073331] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Releasing lock "refresh_cache-15a8424e-27a6-4b77-b57c-d163345b8fed" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.073672] env[63538]: DEBUG nova.compute.manager [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Instance network_info: |[{"id": "0fbe2e21-79c6-4b82-a23c-a25b732e78b7", "address": "fa:16:3e:4d:75:eb", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fbe2e21-79", "ovs_interfaceid": "0fbe2e21-79c6-4b82-a23c-a25b732e78b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 572.074135] env[63538]: DEBUG oslo_concurrency.lockutils [req-cc08990c-af6d-4ef2-918f-633107a3d9b9 req-33450091-4a80-4d21-bb83-dcee719f9439 service nova] Acquired lock "refresh_cache-15a8424e-27a6-4b77-b57c-d163345b8fed" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.074933] env[63538]: DEBUG nova.network.neutron [req-cc08990c-af6d-4ef2-918f-633107a3d9b9 req-33450091-4a80-4d21-bb83-dcee719f9439 service nova] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Refreshing network info cache for port 0fbe2e21-79c6-4b82-a23c-a25b732e78b7 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 572.076367] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:75:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0fbe2e21-79c6-4b82-a23c-a25b732e78b7', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 572.085018] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] 
Creating folder: Project (64662e4603754bd4a9a59035b19992c8). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.088638] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fab5d018-1b36-458d-9261-75655cc0fe04 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.102998] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Created folder: Project (64662e4603754bd4a9a59035b19992c8) in parent group-v992234. [ 572.103247] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Creating folder: Instances. Parent ref: group-v992238. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 572.103493] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66859fe9-fcec-48a3-b6a2-0492ecc6d962 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.113638] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Created folder: Instances in parent group-v992238. [ 572.113821] env[63538]: DEBUG oslo.service.loopingcall [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 572.114024] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 572.114281] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fcd961fb-16b3-4373-856e-1420e134db5e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.138884] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 572.138884] env[63538]: value = "task-5100311" [ 572.138884] env[63538]: _type = "Task" [ 572.138884] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.152108] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100311, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.153541] env[63538]: DEBUG nova.network.neutron [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Successfully updated port: 937fc657-0b54-4a28-98fe-43139e1ba61c {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 572.168948] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Preparing fetch location {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 572.169282] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Creating directory with path [datastore1] vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a/faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 572.169629] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4daa69a-9cc9-46d1-a0b1-87cb87af0615 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.184099] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Created directory with path [datastore1] vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a/faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 572.184099] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Fetch image to [datastore1] vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 572.184445] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Downloading image file data faabbca4-e27b-433a-b93d-f059fd73bc92 to [datastore1] vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk on the data store datastore1 {{(pid=63538) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 572.185272] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d165ca79-506f-48d3-ad3f-db7bbc89ed12 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.200013] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3411096-d5a1-4518-99a6-923df61f49e6 
{{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.210421] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b9289e-9f3f-471f-a77e-0b17af3d4439 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.252912] env[63538]: DEBUG nova.network.neutron [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Successfully created port: 9bd783f7-7fb0-4c77-923b-34206070a65c {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 572.256844] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e7741d-9e79-4c21-b1a2-dc2d850c6e9e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.261605] env[63538]: DEBUG nova.compute.manager [req-793cff58-63d4-48f1-8dab-8cf9f570f3d6 req-8020750f-0be9-4236-88ea-fa2d51181732 service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Received event network-changed-51784c09-2e46-4add-9f20-a0a9563f7eaf {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 572.261605] env[63538]: DEBUG nova.compute.manager [req-793cff58-63d4-48f1-8dab-8cf9f570f3d6 req-8020750f-0be9-4236-88ea-fa2d51181732 service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Refreshing instance network info cache due to event network-changed-51784c09-2e46-4add-9f20-a0a9563f7eaf. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 572.261605] env[63538]: DEBUG oslo_concurrency.lockutils [req-793cff58-63d4-48f1-8dab-8cf9f570f3d6 req-8020750f-0be9-4236-88ea-fa2d51181732 service nova] Acquiring lock "refresh_cache-174368d1-9910-495b-a923-842e0440fd01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.261783] env[63538]: DEBUG oslo_concurrency.lockutils [req-793cff58-63d4-48f1-8dab-8cf9f570f3d6 req-8020750f-0be9-4236-88ea-fa2d51181732 service nova] Acquired lock "refresh_cache-174368d1-9910-495b-a923-842e0440fd01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.262028] env[63538]: DEBUG nova.network.neutron [req-793cff58-63d4-48f1-8dab-8cf9f570f3d6 req-8020750f-0be9-4236-88ea-fa2d51181732 service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Refreshing network info cache for port 51784c09-2e46-4add-9f20-a0a9563f7eaf {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 572.269701] env[63538]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-22b44b8f-abd4-4352-82c9-79cd4645c51b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.309772] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Downloading image file data faabbca4-e27b-433a-b93d-f059fd73bc92 to the data store datastore1 {{(pid=63538) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 572.321262] 
env[63538]: DEBUG oslo_concurrency.lockutils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.321262] env[63538]: DEBUG nova.compute.manager [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 572.325349] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.166s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.326563] env[63538]: INFO nova.compute.claims [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 572.435133] env[63538]: DEBUG oslo_vmware.rw_handles [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63538) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 572.508549] env[63538]: DEBUG nova.network.neutron [req-cc08990c-af6d-4ef2-918f-633107a3d9b9 req-33450091-4a80-4d21-bb83-dcee719f9439 service nova] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Updated VIF entry in instance network info cache for port 0fbe2e21-79c6-4b82-a23c-a25b732e78b7. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 572.509843] env[63538]: DEBUG nova.network.neutron [req-cc08990c-af6d-4ef2-918f-633107a3d9b9 req-33450091-4a80-4d21-bb83-dcee719f9439 service nova] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Updating instance_info_cache with network_info: [{"id": "0fbe2e21-79c6-4b82-a23c-a25b732e78b7", "address": "fa:16:3e:4d:75:eb", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fbe2e21-79", "ovs_interfaceid": "0fbe2e21-79c6-4b82-a23c-a25b732e78b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.652822] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100311, 'name': CreateVM_Task, 'duration_secs': 0.408604} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.658279] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 572.665222] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.665222] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.665222] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 572.665222] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ae525dc-f2f0-4c6f-8f6e-8d62bd088a12 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.665222] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquiring lock "refresh_cache-4b8fb9ad-a366-423d-81b1-04c5e4ec9264" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.665932] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquired lock "refresh_cache-4b8fb9ad-a366-423d-81b1-04c5e4ec9264" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.667440] env[63538]: DEBUG nova.network.neutron [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 572.682752] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Waiting for the task: (returnval){ [ 572.682752] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bc40a2-0371-1813-a5ab-2c0106ac23b4" [ 572.682752] env[63538]: _type = "Task" [ 572.682752] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.732192] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.732360] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 572.732621] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.840021] env[63538]: DEBUG nova.compute.utils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 572.851226] env[63538]: DEBUG nova.compute.manager [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 
tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Not allocating networking since 'none' was specified. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 573.016515] env[63538]: DEBUG oslo_concurrency.lockutils [req-cc08990c-af6d-4ef2-918f-633107a3d9b9 req-33450091-4a80-4d21-bb83-dcee719f9439 service nova] Releasing lock "refresh_cache-15a8424e-27a6-4b77-b57c-d163345b8fed" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.054839] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "36d40b69-fae7-4867-afa1-4befdc96bde0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.055118] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "36d40b69-fae7-4867-afa1-4befdc96bde0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.195908] env[63538]: DEBUG oslo_vmware.rw_handles [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Completed reading data from the image iterator. {{(pid=63538) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 573.195908] env[63538]: DEBUG oslo_vmware.rw_handles [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 573.287759] env[63538]: DEBUG nova.network.neutron [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 573.335329] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Downloaded image file data faabbca4-e27b-433a-b93d-f059fd73bc92 to vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk on the data store datastore1 {{(pid=63538) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 573.337317] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Caching image {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 573.339108] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Copying Virtual Disk [datastore1] vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk to [datastore1] vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 573.339108] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a24e228-7b5c-4113-bec5-339ace4638c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.350604] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 573.350604] env[63538]: value = "task-5100312" [ 573.350604] env[63538]: _type = "Task" [ 573.350604] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.351335] env[63538]: DEBUG nova.compute.manager [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 573.369256] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100312, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.614597] env[63538]: DEBUG nova.network.neutron [req-793cff58-63d4-48f1-8dab-8cf9f570f3d6 req-8020750f-0be9-4236-88ea-fa2d51181732 service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Updated VIF entry in instance network info cache for port 51784c09-2e46-4add-9f20-a0a9563f7eaf. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 573.614879] env[63538]: DEBUG nova.network.neutron [req-793cff58-63d4-48f1-8dab-8cf9f570f3d6 req-8020750f-0be9-4236-88ea-fa2d51181732 service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Updating instance_info_cache with network_info: [{"id": "51784c09-2e46-4add-9f20-a0a9563f7eaf", "address": "fa:16:3e:df:3b:0a", "network": {"id": "1a29c626-75f0-4d3c-b7cf-2266e6dfe02b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-706845743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2488493adc8b48d29e615ebcb8a5935e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca401eaa-889a-4f9f-ac9a-56b4c41bfc06", "external-id": "nsx-vlan-transportzone-877", "segmentation_id": 877, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51784c09-2e", "ovs_interfaceid": "51784c09-2e46-4add-9f20-a0a9563f7eaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.626318] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee87033-e9a9-49d3-9788-8332034e8184 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.641617] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e19890d-3831-416a-b72f-a035ebc20b9f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.677687] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91a06ed-7859-40d1-9ffa-cf5f775cead0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.687095] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c10061-b3d0-4d85-a7d6-9d42dbebdb3e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.693365] env[63538]: DEBUG nova.compute.manager [req-f31d7a80-0489-4992-96be-ec59705e1e9a req-2c237012-68cc-479e-a651-8b6814f6da70 service nova] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Received event network-vif-plugged-937fc657-0b54-4a28-98fe-43139e1ba61c {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 573.694034] env[63538]: DEBUG oslo_concurrency.lockutils [req-f31d7a80-0489-4992-96be-ec59705e1e9a req-2c237012-68cc-479e-a651-8b6814f6da70 service nova] Acquiring lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.694034] env[63538]: DEBUG oslo_concurrency.lockutils 
[req-f31d7a80-0489-4992-96be-ec59705e1e9a req-2c237012-68cc-479e-a651-8b6814f6da70 service nova] Lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.694212] env[63538]: DEBUG oslo_concurrency.lockutils [req-f31d7a80-0489-4992-96be-ec59705e1e9a req-2c237012-68cc-479e-a651-8b6814f6da70 service nova] Lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.694382] env[63538]: DEBUG nova.compute.manager [req-f31d7a80-0489-4992-96be-ec59705e1e9a req-2c237012-68cc-479e-a651-8b6814f6da70 service nova] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] No waiting events found dispatching network-vif-plugged-937fc657-0b54-4a28-98fe-43139e1ba61c {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 573.694538] env[63538]: WARNING nova.compute.manager [req-f31d7a80-0489-4992-96be-ec59705e1e9a req-2c237012-68cc-479e-a651-8b6814f6da70 service nova] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Received unexpected event network-vif-plugged-937fc657-0b54-4a28-98fe-43139e1ba61c for instance with vm_state building and task_state spawning. [ 573.706112] env[63538]: DEBUG nova.compute.provider_tree [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 573.867422] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100312, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.072737] env[63538]: DEBUG nova.network.neutron [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Updating instance_info_cache with network_info: [{"id": "937fc657-0b54-4a28-98fe-43139e1ba61c", "address": "fa:16:3e:d0:5d:4c", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.38", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap937fc657-0b", "ovs_interfaceid": "937fc657-0b54-4a28-98fe-43139e1ba61c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.117354] env[63538]: DEBUG oslo_concurrency.lockutils [req-793cff58-63d4-48f1-8dab-8cf9f570f3d6 req-8020750f-0be9-4236-88ea-fa2d51181732 service nova] Releasing lock "refresh_cache-174368d1-9910-495b-a923-842e0440fd01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.213182] env[63538]: DEBUG nova.scheduler.client.report [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 574.369780] env[63538]: DEBUG nova.compute.manager [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 574.372555] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100312, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.84636} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.373456] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Copied Virtual Disk [datastore1] vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk to [datastore1] vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 574.373713] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Deleting the datastore file [datastore1] vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 574.373927] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-528597d6-1b54-4b34-a051-3bdc6d028d32 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.385320] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 574.385320] env[63538]: value = "task-5100313" [ 574.385320] env[63538]: _type = "Task" [ 574.385320] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.395462] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100313, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.414390] env[63538]: DEBUG nova.virt.hardware [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 574.414574] env[63538]: DEBUG nova.virt.hardware [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 574.414725] env[63538]: DEBUG nova.virt.hardware [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 574.414898] env[63538]: DEBUG nova.virt.hardware [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 574.415241] env[63538]: DEBUG nova.virt.hardware [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 574.415381] env[63538]: DEBUG nova.virt.hardware [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 574.415629] env[63538]: DEBUG nova.virt.hardware [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 574.415792] env[63538]: DEBUG nova.virt.hardware [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 574.415983] env[63538]: DEBUG nova.virt.hardware [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 574.416131] env[63538]: DEBUG nova.virt.hardware [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 574.416297] env[63538]: DEBUG nova.virt.hardware [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 574.417304] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4cea650-cfe1-44ec-9b98-a7a34ed4245d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.427697] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfb06f0-3807-432e-9110-a99bb7edcc3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.447056] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 574.452667] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Creating folder: Project (4aaa7ff9816b44e88f2afb0869b10b3d). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 574.453371] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ed12af5-f30c-4bac-904b-c488af3611b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.467776] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Created folder: Project (4aaa7ff9816b44e88f2afb0869b10b3d) in parent group-v992234. [ 574.467899] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Creating folder: Instances. Parent ref: group-v992241. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 574.468206] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33c3eee6-c731-42f1-a68d-88c1d1719b8a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.479245] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Created folder: Instances in parent group-v992241. [ 574.479245] env[63538]: DEBUG oslo.service.loopingcall [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 574.479245] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 574.479245] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc82df63-478d-481d-9010-c709f93bd452 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.504728] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 574.504728] env[63538]: value = "task-5100316" [ 574.504728] env[63538]: _type = "Task" [ 574.504728] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.515182] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100316, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.576344] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Releasing lock "refresh_cache-4b8fb9ad-a366-423d-81b1-04c5e4ec9264" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.576344] env[63538]: DEBUG nova.compute.manager [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Instance network_info: |[{"id": "937fc657-0b54-4a28-98fe-43139e1ba61c", "address": "fa:16:3e:d0:5d:4c", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.38", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap937fc657-0b", "ovs_interfaceid": "937fc657-0b54-4a28-98fe-43139e1ba61c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 574.576658] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:5d:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '937fc657-0b54-4a28-98fe-43139e1ba61c', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 574.588549] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Creating folder: Project (2ab51bcca7dc40688572337d893c1b4f). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 574.588888] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-68b02791-173b-4004-9af1-13af81787598 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.602150] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Created folder: Project (2ab51bcca7dc40688572337d893c1b4f) in parent group-v992234. [ 574.602371] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Creating folder: Instances. Parent ref: group-v992244. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 574.602613] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30768142-9c85-415e-8b64-0fc3b3afe1c5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.614764] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Created folder: Instances in parent group-v992244. [ 574.616268] env[63538]: DEBUG oslo.service.loopingcall [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 574.616416] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 574.616586] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96106718-5c66-4ad3-bacc-5fd2737ff57f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.639298] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 574.639298] env[63538]: value = "task-5100319" [ 574.639298] env[63538]: _type = "Task" [ 574.639298] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.649052] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100319, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.723711] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.399s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.725530] env[63538]: DEBUG nova.compute.manager [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 574.729879] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.391s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.731718] env[63538]: INFO nova.compute.claims [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 574.898665] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100313, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026943} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.899618] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 574.899982] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Moving file from [datastore1] vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a/faabbca4-e27b-433a-b93d-f059fd73bc92 to [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92. 
{{(pid=63538) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 574.900330] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-e4efdb76-1d51-4253-8f3d-ce0120dba010 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.911755] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 574.911755] env[63538]: value = "task-5100320" [ 574.911755] env[63538]: _type = "Task" [ 574.911755] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.922924] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100320, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.020682] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100316, 'name': CreateVM_Task, 'duration_secs': 0.382261} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.024325] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 575.027366] env[63538]: DEBUG oslo_concurrency.lockutils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.027366] env[63538]: DEBUG oslo_concurrency.lockutils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.027366] env[63538]: DEBUG oslo_concurrency.lockutils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 575.027366] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6ad287a-856e-4389-9e1d-890860c006d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.032174] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Waiting for the task: (returnval){ [ 575.032174] env[63538]: value = 
"session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52560436-46bb-ee97-528a-937565d6d867" [ 575.032174] env[63538]: _type = "Task" [ 575.032174] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.043930] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52560436-46bb-ee97-528a-937565d6d867, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.129976] env[63538]: DEBUG nova.compute.manager [req-26acb5c6-eebd-48ce-92c9-4d9edbf3d0f3 req-5ea96741-bc94-4fdf-b8e9-b49ecc1db3b0 service nova] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Received event network-changed-937fc657-0b54-4a28-98fe-43139e1ba61c {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 575.130079] env[63538]: DEBUG nova.compute.manager [req-26acb5c6-eebd-48ce-92c9-4d9edbf3d0f3 req-5ea96741-bc94-4fdf-b8e9-b49ecc1db3b0 service nova] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Refreshing instance network info cache due to event network-changed-937fc657-0b54-4a28-98fe-43139e1ba61c. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 575.131478] env[63538]: DEBUG oslo_concurrency.lockutils [req-26acb5c6-eebd-48ce-92c9-4d9edbf3d0f3 req-5ea96741-bc94-4fdf-b8e9-b49ecc1db3b0 service nova] Acquiring lock "refresh_cache-4b8fb9ad-a366-423d-81b1-04c5e4ec9264" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.131478] env[63538]: DEBUG oslo_concurrency.lockutils [req-26acb5c6-eebd-48ce-92c9-4d9edbf3d0f3 req-5ea96741-bc94-4fdf-b8e9-b49ecc1db3b0 service nova] Acquired lock "refresh_cache-4b8fb9ad-a366-423d-81b1-04c5e4ec9264" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.131478] env[63538]: DEBUG nova.network.neutron [req-26acb5c6-eebd-48ce-92c9-4d9edbf3d0f3 req-5ea96741-bc94-4fdf-b8e9-b49ecc1db3b0 service nova] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Refreshing network info cache for port 937fc657-0b54-4a28-98fe-43139e1ba61c {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 575.154061] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Acquiring lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.154148] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.159501] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100319, 'name': CreateVM_Task, 
'duration_secs': 0.437009} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.159892] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 575.160553] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.213260] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.213260] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.213260] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Starting heal instance info cache {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 575.213260] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Rebuilding the list of instances to heal {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10021}} [ 575.239868] env[63538]: DEBUG nova.compute.utils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 575.247327] env[63538]: DEBUG nova.compute.manager [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 575.247327] env[63538]: DEBUG nova.network.neutron [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 575.339934] env[63538]: DEBUG nova.policy [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f91d4ce5a5724fb7b785591ae831506d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c1f0c999ede418c866074d9276050ff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 575.421988] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100320, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.030306} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.422334] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] File moved {{(pid=63538) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 575.422594] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Cleaning up location [datastore1] vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 575.422811] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Deleting the datastore file [datastore1] vmware_temp/a1f71faa-040e-4fd2-a73d-9c334453428a {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 575.423131] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae418e40-17b4-4af3-9b3f-6e2e4eff9b0c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.433057] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 575.433057] env[63538]: value = "task-5100321" [ 575.433057] env[63538]: _type = "Task" [ 575.433057] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.445289] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100321, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.547383] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52560436-46bb-ee97-528a-937565d6d867, 'name': SearchDatastore_Task, 'duration_secs': 0.011018} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.548049] env[63538]: DEBUG oslo_concurrency.lockutils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.548049] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 575.548196] env[63538]: DEBUG oslo_concurrency.lockutils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.548431] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.549468] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 575.549468] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b085fa0e-ee92-4b1e-b479-149729f5d7f2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.555140] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 
tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 575.555140] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b04c19-7261-d270-dba4-8aef662f67e7" [ 575.555140] env[63538]: _type = "Task" [ 575.555140] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.565423] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b04c19-7261-d270-dba4-8aef662f67e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.718596] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 174368d1-9910-495b-a923-842e0440fd01] Skipping network cache update for instance because it is Building. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10030}} [ 575.718869] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Skipping network cache update for instance because it is Building. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10030}} [ 575.718869] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Skipping network cache update for instance because it is Building. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10030}} [ 575.719926] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e1710498-0616-4862-afc0-6e452dc19882] Skipping network cache update for instance because it is Building. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10030}} [ 575.719926] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Skipping network cache update for instance because it is Building. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10030}} [ 575.719926] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Skipping network cache update for instance because it is Building. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10030}} [ 575.719926] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Skipping network cache update for instance because it is Building. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10030}} [ 575.719926] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Didn't find any instances for network info cache update. 
{{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10103}} [ 575.720227] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.720482] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.720542] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.720718] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.720910] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.721104] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.751074] env[63538]: DEBUG nova.compute.manager [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 575.839958] env[63538]: DEBUG nova.network.neutron [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Successfully updated port: 9bd783f7-7fb0-4c77-923b-34206070a65c {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 575.947851] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100321, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.027186} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.948659] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 575.953455] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6fce6e9-bfd0-4ee3-a890-0b02eefda19d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.964340] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 575.964340] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5253a8f0-24bb-e8a3-3725-dec9aae1612e" [ 575.964340] env[63538]: _type = "Task" [ 575.964340] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.977562] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5253a8f0-24bb-e8a3-3725-dec9aae1612e, 'name': SearchDatastore_Task, 'duration_secs': 0.009966} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.984551] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.984551] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 174368d1-9910-495b-a923-842e0440fd01/174368d1-9910-495b-a923-842e0440fd01.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 575.984551] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.984551] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 575.984835] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89931c94-fc79-4f03-8734-e7dc0f3aedcc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.986784] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22fa1ccd-7645-446f-85ab-1d1ec43044ff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.999070] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 575.999070] env[63538]: value = "task-5100322" [ 575.999070] env[63538]: _type = "Task" [ 575.999070] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.017848] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100322, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.018815] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 576.019012] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 576.019820] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e179c186-da69-4e95-9c40-29059d22f0bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.030174] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Waiting for the task: (returnval){ [ 576.030174] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5288ce7a-0eef-f27c-8b0c-d602006ebd59" [ 576.030174] env[63538]: _type = "Task" [ 576.030174] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.052316] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5288ce7a-0eef-f27c-8b0c-d602006ebd59, 'name': SearchDatastore_Task, 'duration_secs': 0.010742} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.053152] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edeac699-e0d6-404c-b28f-63c7f9ab6325 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.068130] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Waiting for the task: (returnval){ [ 576.068130] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52dea380-b4a5-6e26-cc3e-3094922b41ca" [ 576.068130] env[63538]: _type = "Task" [ 576.068130] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.079559] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b04c19-7261-d270-dba4-8aef662f67e7, 'name': SearchDatastore_Task, 'duration_secs': 0.008878} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.083879] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.087748] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 576.087986] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.095189] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52dea380-b4a5-6e26-cc3e-3094922b41ca, 'name': SearchDatastore_Task, 'duration_secs': 0.007989} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.095189] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.095837] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 15a8424e-27a6-4b77-b57c-d163345b8fed/15a8424e-27a6-4b77-b57c-d163345b8fed.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 576.095837] env[63538]: DEBUG oslo_concurrency.lockutils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.096008] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 576.097247] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fec8fc2e-03f1-498e-a1ca-b987e59604f1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.109511] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e11896aa-7e97-4a3d-8e0d-55c4cb652433 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.119674] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Waiting for the task: (returnval){ [ 576.119674] env[63538]: value = "task-5100323" [ 576.119674] env[63538]: _type = "Task" [ 576.119674] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.123157] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 576.123157] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 576.125860] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39919387-42f7-4c65-964e-457be8ffa3e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.134229] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Waiting for the task: (returnval){ [ 576.134229] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a2ec26-4801-230f-c6e7-13a7ddc80f89" [ 576.134229] env[63538]: _type = "Task" [ 576.134229] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.141023] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100323, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.150524] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a2ec26-4801-230f-c6e7-13a7ddc80f89, 'name': SearchDatastore_Task, 'duration_secs': 0.009404} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.154864] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6038bc8-1380-404d-b35e-cebe197a9dd8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.161458] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Waiting for the task: (returnval){ [ 576.161458] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527caa78-d8db-717f-7179-d9f6205e79fc" [ 576.161458] env[63538]: _type = "Task" [ 576.161458] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.171418] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527caa78-d8db-717f-7179-d9f6205e79fc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.204199] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28795258-c7c2-41f9-a746-2d04f072adc0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.216880] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac79a7c-9bb5-4dd7-a532-cc2bbc411a8b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.258878] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Getting list of instances from cluster (obj){ [ 576.258878] env[63538]: value = "domain-c8" [ 576.258878] env[63538]: _type = "ClusterComputeResource" [ 576.258878] env[63538]: } {{(pid=63538) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 576.262063] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322417b5-9930-4425-be84-182e385bdd24 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.268883] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a39346-164c-4ac8-b1d2-8aa41d78dff5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.286899] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda7e8f9-5363-4660-b9c8-d14d0a0fa33c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.295302] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Got total of 4 instances {{(pid=63538) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 576.295487] env[63538]: WARNING nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] While synchronizing instance power states, found 7 instances in the database and 4 instances on the hypervisor. 
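The Acquiring / Acquired / Releasing lock lines throughout this trace (the "compute_resources" claim lock above, and the per-instance UUID locks taken by the power-state sync below) come from oslo.concurrency's lockutils. A minimal sketch of that API follows; the function names and arguments are hypothetical stand-ins, not the actual Nova code.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid, vcpus, memory_mb):
    # Callers serialize on the named lock; the time a caller spent blocked and
    # the time the lock was held are what lockutils logs at DEBUG as
    # "waited N.NNNs" and "held N.NNNs".
    return {'uuid': instance_uuid, 'vcpus': vcpus, 'memory_mb': memory_mb}


def query_driver_power_state_and_sync(instance_uuid):
    # Context-manager form, as used for the per-instance locks below: the body
    # only runs while the lock named after the instance UUID is held.
    with lockutils.lock(instance_uuid):
        pass  # compare the DB power state with the hypervisor's here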
[ 576.295640] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid 174368d1-9910-495b-a923-842e0440fd01 {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 576.299667] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid 15a8424e-27a6-4b77-b57c-d163345b8fed {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 576.300132] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid 4b8fb9ad-a366-423d-81b1-04c5e4ec9264 {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 576.300299] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid e1710498-0616-4862-afc0-6e452dc19882 {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 576.300573] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid d99b7b8e-633f-4fba-bce6-9b8e9e9892d1 {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 576.300858] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid c8a02fa6-5232-4dde-b6dd-0da1089b6bbf {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 576.300919] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid 0a7c34e0-1acc-4761-804a-eb9ee00fdd77 {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 576.306928] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "174368d1-9910-495b-a923-842e0440fd01" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.306928] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "15a8424e-27a6-4b77-b57c-d163345b8fed" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.306928] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.306928] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "e1710498-0616-4862-afc0-6e452dc19882" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.306928] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "d99b7b8e-633f-4fba-bce6-9b8e9e9892d1" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.307137] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.307137] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.307137] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 576.307137] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63538) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10636}} [ 576.309155] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 576.322064] env[63538]: DEBUG nova.compute.provider_tree [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 576.349466] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Acquiring lock "refresh_cache-e1710498-0616-4862-afc0-6e452dc19882" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.349466] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Acquired lock "refresh_cache-e1710498-0616-4862-afc0-6e452dc19882" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.349466] env[63538]: DEBUG nova.network.neutron [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 576.477724] env[63538]: DEBUG nova.network.neutron [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] 
Successfully created port: 8d80ee33-5e67-4651-a9b1-1f58ca92fb2e {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 576.514691] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100322, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.539201] env[63538]: DEBUG nova.network.neutron [req-26acb5c6-eebd-48ce-92c9-4d9edbf3d0f3 req-5ea96741-bc94-4fdf-b8e9-b49ecc1db3b0 service nova] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Updated VIF entry in instance network info cache for port 937fc657-0b54-4a28-98fe-43139e1ba61c. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 576.540038] env[63538]: DEBUG nova.network.neutron [req-26acb5c6-eebd-48ce-92c9-4d9edbf3d0f3 req-5ea96741-bc94-4fdf-b8e9-b49ecc1db3b0 service nova] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Updating instance_info_cache with network_info: [{"id": "937fc657-0b54-4a28-98fe-43139e1ba61c", "address": "fa:16:3e:d0:5d:4c", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.38", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap937fc657-0b", "ovs_interfaceid": "937fc657-0b54-4a28-98fe-43139e1ba61c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.634382] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100323, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.677162] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527caa78-d8db-717f-7179-d9f6205e79fc, 'name': SearchDatastore_Task, 'duration_secs': 0.076369} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.677395] env[63538]: DEBUG oslo_concurrency.lockutils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.677821] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] d99b7b8e-633f-4fba-bce6-9b8e9e9892d1/d99b7b8e-633f-4fba-bce6-9b8e9e9892d1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 576.678211] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.678495] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 576.678766] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-976e479c-d561-4bb8-b77b-f435ec04c8ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.682368] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-539d3386-e514-47b4-a822-b05cab45d445 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.692387] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Waiting for the task: (returnval){ [ 576.692387] env[63538]: value = "task-5100324" [ 576.692387] env[63538]: _type = "Task" [ 576.692387] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.700283] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 576.700591] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 576.705189] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c4e84ac-cc44-497e-881d-85e579605a51 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.708393] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100324, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.713180] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 576.713180] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520c1130-1151-86a9-e22a-ca5fea0d4d3a" [ 576.713180] env[63538]: _type = "Task" [ 576.713180] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.728130] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520c1130-1151-86a9-e22a-ca5fea0d4d3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.779789] env[63538]: DEBUG nova.compute.manager [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 576.812494] env[63538]: DEBUG nova.virt.hardware [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 576.812849] env[63538]: DEBUG nova.virt.hardware [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 576.813112] env[63538]: DEBUG nova.virt.hardware [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 576.813338] env[63538]: DEBUG nova.virt.hardware [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 576.813559] env[63538]: DEBUG nova.virt.hardware [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 576.814149] env[63538]: DEBUG nova.virt.hardware [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 576.814509] env[63538]: DEBUG nova.virt.hardware [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 576.814820] env[63538]: DEBUG nova.virt.hardware [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 576.815309] env[63538]: DEBUG nova.virt.hardware [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 
tempest-ServersAdminTestJSON-5453135-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 576.815656] env[63538]: DEBUG nova.virt.hardware [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 576.815878] env[63538]: DEBUG nova.virt.hardware [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 576.817711] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.819205] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d70738b-8140-4969-9249-6711967205e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.830771] env[63538]: DEBUG nova.scheduler.client.report [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 576.838028] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8120c6a9-1706-4ab5-8eb8-35bb571727b1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.984805] env[63538]: DEBUG nova.network.neutron [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 577.019966] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100322, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524431} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.021905] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 174368d1-9910-495b-a923-842e0440fd01/174368d1-9910-495b-a923-842e0440fd01.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 577.021905] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 577.021905] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e52b14cf-f4e9-49ba-9fb6-679102870208 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.031139] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 577.031139] env[63538]: value = "task-5100325" [ 577.031139] env[63538]: _type = "Task" [ 577.031139] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.046281] env[63538]: DEBUG oslo_concurrency.lockutils [req-26acb5c6-eebd-48ce-92c9-4d9edbf3d0f3 req-5ea96741-bc94-4fdf-b8e9-b49ecc1db3b0 service nova] Releasing lock "refresh_cache-4b8fb9ad-a366-423d-81b1-04c5e4ec9264" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.046811] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100325, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.145623] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100323, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.802861} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.145941] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 15a8424e-27a6-4b77-b57c-d163345b8fed/15a8424e-27a6-4b77-b57c-d163345b8fed.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 577.146305] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 577.146435] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7d3a359-cb76-41a3-bf95-1722d3d040ab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.156275] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Waiting for the task: (returnval){ [ 577.156275] env[63538]: value = "task-5100326" [ 577.156275] env[63538]: _type = "Task" [ 577.156275] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.175318] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100326, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.207097] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100324, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.227879] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520c1130-1151-86a9-e22a-ca5fea0d4d3a, 'name': SearchDatastore_Task, 'duration_secs': 0.052829} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.228762] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b55e0f0-f90a-49d2-bb09-1abf691ac506 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.238026] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 577.238026] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52daea05-d65e-89e9-9c52-864b1194fbd2" [ 577.238026] env[63538]: _type = "Task" [ 577.238026] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.246684] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52daea05-d65e-89e9-9c52-864b1194fbd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.345352] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.615s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.345592] env[63538]: DEBUG nova.compute.manager [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 577.348417] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.694s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.351443] env[63538]: INFO nova.compute.claims [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 577.528048] env[63538]: DEBUG nova.network.neutron [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Updating instance_info_cache with network_info: [{"id": "9bd783f7-7fb0-4c77-923b-34206070a65c", "address": "fa:16:3e:35:fa:86", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bd783f7-7f", "ovs_interfaceid": "9bd783f7-7fb0-4c77-923b-34206070a65c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.548814] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100325, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088613} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.549988] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 577.552273] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18cefceb-3dea-4b2e-8171-c9eb635939e2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.587351] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 174368d1-9910-495b-a923-842e0440fd01/174368d1-9910-495b-a923-842e0440fd01.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 577.589254] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76b04cf7-f38c-4ff0-baba-dc440e410031 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.615781] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 577.615781] env[63538]: value = "task-5100327" [ 577.615781] env[63538]: _type = "Task" [ 577.615781] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.629830] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100327, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.669389] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100326, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097824} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.670025] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 577.671065] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25ad7e3-3177-484c-93db-b3054d811d2e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.700012] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 15a8424e-27a6-4b77-b57c-d163345b8fed/15a8424e-27a6-4b77-b57c-d163345b8fed.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 577.700572] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5badb5d-9283-4732-a24d-f76c10432f82 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.725245] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100324, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.712516} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.726702] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] d99b7b8e-633f-4fba-bce6-9b8e9e9892d1/d99b7b8e-633f-4fba-bce6-9b8e9e9892d1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 577.726959] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 577.727896] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Waiting for the task: (returnval){ [ 577.727896] env[63538]: value = "task-5100328" [ 577.727896] env[63538]: _type = "Task" [ 577.727896] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.727896] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6fdf638d-a60d-47ab-bcd0-d31a05e828fa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.744345] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Waiting for the task: (returnval){ [ 577.744345] env[63538]: value = "task-5100329" [ 577.744345] env[63538]: _type = "Task" [ 577.744345] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.754251] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100329, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.762743] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52daea05-d65e-89e9-9c52-864b1194fbd2, 'name': SearchDatastore_Task, 'duration_secs': 0.060511} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.762930] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.763164] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 4b8fb9ad-a366-423d-81b1-04c5e4ec9264/4b8fb9ad-a366-423d-81b1-04c5e4ec9264.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 577.763498] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5714fb95-321c-428a-bc11-43c905877156 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.772297] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 577.772297] env[63538]: value = "task-5100330" [ 577.772297] env[63538]: _type = "Task" [ 577.772297] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.782449] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100330, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.858285] env[63538]: DEBUG nova.compute.utils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 577.866629] env[63538]: DEBUG nova.compute.manager [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 577.866629] env[63538]: DEBUG nova.network.neutron [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 578.034033] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Releasing lock "refresh_cache-e1710498-0616-4862-afc0-6e452dc19882" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.034033] env[63538]: DEBUG nova.compute.manager [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Instance network_info: |[{"id": "9bd783f7-7fb0-4c77-923b-34206070a65c", "address": "fa:16:3e:35:fa:86", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bd783f7-7f", "ovs_interfaceid": "9bd783f7-7fb0-4c77-923b-34206070a65c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 578.034277] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 
tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:fa:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9bd783f7-7fb0-4c77-923b-34206070a65c', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 578.042853] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Creating folder: Project (2602b69ba91f4ecca53962b19ccdedc1). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 578.043991] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b6a5200-c9d2-4b23-8e85-caa7973a5298 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.055841] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Created folder: Project (2602b69ba91f4ecca53962b19ccdedc1) in parent group-v992234. [ 578.055841] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Creating folder: Instances. Parent ref: group-v992247. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 578.055841] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-276dae08-6b2e-43fb-a157-23b2cbfa3edc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.065779] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Created folder: Instances in parent group-v992247. [ 578.065929] env[63538]: DEBUG oslo.service.loopingcall [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 578.066185] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1710498-0616-4862-afc0-6e452dc19882] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 578.066438] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64e1dd10-e53b-4d69-a5a0-e5f460966da7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.095857] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 578.095857] env[63538]: value = "task-5100333" [ 578.095857] env[63538]: _type = "Task" [ 578.095857] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.104358] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100333, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.127569] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100327, 'name': ReconfigVM_Task, 'duration_secs': 0.353136} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.127569] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 174368d1-9910-495b-a923-842e0440fd01/174368d1-9910-495b-a923-842e0440fd01.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 578.127569] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-718cee88-53d3-4482-946c-49144e0ecb4a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.136814] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 578.136814] env[63538]: value = "task-5100334" [ 578.136814] env[63538]: _type = "Task" [ 578.136814] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.148294] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100334, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.244083] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100328, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.258487] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100329, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089496} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.258982] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 578.260241] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9df1bbd-6a1b-423c-9ac4-ebecd1d7654c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.287971] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] d99b7b8e-633f-4fba-bce6-9b8e9e9892d1/d99b7b8e-633f-4fba-bce6-9b8e9e9892d1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 578.289560] env[63538]: DEBUG nova.policy [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73a5bcb6aebe4e8c8d8eb990686da887', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64606c99619d48e1a165d3ab701caf7a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 578.294906] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99366d81-30f4-48ce-84b4-f639dddf0a35 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.318333] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100330, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.320294] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Waiting for the task: (returnval){ [ 578.320294] env[63538]: value = "task-5100335" [ 578.320294] env[63538]: _type = "Task" [ 578.320294] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.335034] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100335, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.361244] env[63538]: DEBUG nova.compute.manager [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] [instance: e1710498-0616-4862-afc0-6e452dc19882] Received event network-vif-plugged-9bd783f7-7fb0-4c77-923b-34206070a65c {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 578.361589] env[63538]: DEBUG oslo_concurrency.lockutils [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] Acquiring lock "e1710498-0616-4862-afc0-6e452dc19882-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.362024] env[63538]: DEBUG oslo_concurrency.lockutils [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] Lock "e1710498-0616-4862-afc0-6e452dc19882-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.362419] env[63538]: DEBUG oslo_concurrency.lockutils [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] Lock "e1710498-0616-4862-afc0-6e452dc19882-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.362511] env[63538]: DEBUG nova.compute.manager [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] [instance: e1710498-0616-4862-afc0-6e452dc19882] No waiting events found dispatching network-vif-plugged-9bd783f7-7fb0-4c77-923b-34206070a65c {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 578.362945] env[63538]: WARNING nova.compute.manager [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] [instance: e1710498-0616-4862-afc0-6e452dc19882] Received unexpected event network-vif-plugged-9bd783f7-7fb0-4c77-923b-34206070a65c for instance with vm_state building and task_state spawning. [ 578.363239] env[63538]: DEBUG nova.compute.manager [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] [instance: e1710498-0616-4862-afc0-6e452dc19882] Received event network-changed-9bd783f7-7fb0-4c77-923b-34206070a65c {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 578.363459] env[63538]: DEBUG nova.compute.manager [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] [instance: e1710498-0616-4862-afc0-6e452dc19882] Refreshing instance network info cache due to event network-changed-9bd783f7-7fb0-4c77-923b-34206070a65c. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 578.363822] env[63538]: DEBUG oslo_concurrency.lockutils [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] Acquiring lock "refresh_cache-e1710498-0616-4862-afc0-6e452dc19882" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.363941] env[63538]: DEBUG oslo_concurrency.lockutils [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] Acquired lock "refresh_cache-e1710498-0616-4862-afc0-6e452dc19882" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.364247] env[63538]: DEBUG nova.network.neutron [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] [instance: e1710498-0616-4862-afc0-6e452dc19882] Refreshing network info cache for port 9bd783f7-7fb0-4c77-923b-34206070a65c {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 578.367035] env[63538]: DEBUG nova.compute.manager [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 578.612607] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100333, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.652878] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100334, 'name': Rename_Task, 'duration_secs': 0.189045} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.653204] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 578.653494] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad0dee3e-b7ce-4143-94bb-3683bec672f4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.661973] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 578.661973] env[63538]: value = "task-5100336" [ 578.661973] env[63538]: _type = "Task" [ 578.661973] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.678960] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100336, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.727863] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820d1528-eb55-4cd5-94b6-c560909c972a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.742384] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622ee850-9f2e-4901-9ad7-4bbf77f8f800 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.745957] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100328, 'name': ReconfigVM_Task, 'duration_secs': 0.799152} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.746273] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 15a8424e-27a6-4b77-b57c-d163345b8fed/15a8424e-27a6-4b77-b57c-d163345b8fed.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 578.747273] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa2fc251-bcde-4afc-bda4-7c8c72e9d96a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.782771] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e5e62d-b5a3-4d5f-928a-837127aed451 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.785540] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Waiting for the task: (returnval){ [ 578.785540] env[63538]: value = "task-5100337" [ 578.785540] env[63538]: _type = "Task" [ 578.785540] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.795486] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100330, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.787083} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.797215] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7bb3e4-98e8-4ea4-92b6-793bd5e4bfb8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.803403] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 4b8fb9ad-a366-423d-81b1-04c5e4ec9264/4b8fb9ad-a366-423d-81b1-04c5e4ec9264.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 578.803403] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 578.806056] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2637df16-1a00-4b79-8624-3fceadaa4545 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.808432] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100337, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.821424] env[63538]: DEBUG nova.compute.provider_tree [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.828352] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 578.828352] env[63538]: value = "task-5100338" [ 578.828352] env[63538]: _type = "Task" [ 578.828352] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.836874] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100335, 'name': ReconfigVM_Task, 'duration_secs': 0.359166} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.837736] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Reconfigured VM instance instance-00000005 to attach disk [datastore1] d99b7b8e-633f-4fba-bce6-9b8e9e9892d1/d99b7b8e-633f-4fba-bce6-9b8e9e9892d1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 578.838773] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e74c5126-584a-47be-a8a7-272870b0e4fc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.844609] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100338, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.851039] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Waiting for the task: (returnval){ [ 578.851039] env[63538]: value = "task-5100339" [ 578.851039] env[63538]: _type = "Task" [ 578.851039] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.866735] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100339, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.109039] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100333, 'name': CreateVM_Task, 'duration_secs': 0.714404} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.110799] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1710498-0616-4862-afc0-6e452dc19882] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 579.111065] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.111231] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.111553] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 579.112043] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-499a9ad5-f284-4037-abc6-c4635e169b4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.120542] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Waiting for the task: (returnval){ [ 579.120542] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5248ca4a-af9a-81c7-7d41-5c468aa77018" [ 579.120542] env[63538]: _type = "Task" [ 579.120542] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.134694] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5248ca4a-af9a-81c7-7d41-5c468aa77018, 'name': SearchDatastore_Task, 'duration_secs': 0.011546} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.135378] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.135753] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 579.136544] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.136850] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.139938] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 579.139938] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-310733a8-bf37-4aa9-9b99-3558f941827f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.149505] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 579.149505] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 579.149505] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98761fc0-27c1-4a1c-b3c6-212270af2f43 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.158099] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Waiting for the task: (returnval){ [ 579.158099] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5229287a-2115-0b69-cc6d-fbe0b2e9224a" [ 579.158099] env[63538]: _type = "Task" [ 579.158099] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.177889] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5229287a-2115-0b69-cc6d-fbe0b2e9224a, 'name': SearchDatastore_Task, 'duration_secs': 0.011872} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.181485] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100336, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.182318] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3503861b-88f0-4574-b1b3-48f5c7164631 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.189453] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Waiting for the task: (returnval){ [ 579.189453] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528529ba-587c-9620-7125-2e394089de31" [ 579.189453] env[63538]: _type = "Task" [ 579.189453] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.200222] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528529ba-587c-9620-7125-2e394089de31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.298927] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100337, 'name': Rename_Task, 'duration_secs': 0.166102} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.299270] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 579.299517] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94b7456a-a749-4548-9df6-49cea45a0f2a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.309160] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Waiting for the task: (returnval){ [ 579.309160] env[63538]: value = "task-5100340" [ 579.309160] env[63538]: _type = "Task" [ 579.309160] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.323069] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100340, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.330132] env[63538]: DEBUG nova.scheduler.client.report [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 579.346171] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100338, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111339} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.346587] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 579.347960] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055c6d92-b13e-4d11-b4ae-db488f048f91 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.381932] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 4b8fb9ad-a366-423d-81b1-04c5e4ec9264/4b8fb9ad-a366-423d-81b1-04c5e4ec9264.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 579.382302] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100339, 'name': Rename_Task, 'duration_secs': 0.175237} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.385122] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-614267da-a57e-452a-868d-89a08aab1ee7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.402057] env[63538]: DEBUG nova.compute.manager [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 579.402585] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 579.403820] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f96c9268-408d-4b50-a6fd-16bd88a8571f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.414640] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Waiting for the task: (returnval){ [ 579.414640] env[63538]: value = "task-5100341" [ 579.414640] env[63538]: _type = "Task" [ 579.414640] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.414953] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 579.414953] env[63538]: value = "task-5100342" [ 579.414953] env[63538]: _type = "Task" [ 579.414953] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.430598] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100341, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.437122] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100342, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.449960] env[63538]: DEBUG nova.virt.hardware [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 579.450346] env[63538]: DEBUG nova.virt.hardware [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 579.450550] env[63538]: DEBUG nova.virt.hardware [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 579.450767] env[63538]: DEBUG nova.virt.hardware [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 579.451050] env[63538]: DEBUG nova.virt.hardware [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Image pref 0:0:0 {{(pid=63538) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 579.451118] env[63538]: DEBUG nova.virt.hardware [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 579.451277] env[63538]: DEBUG nova.virt.hardware [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 579.451442] env[63538]: DEBUG nova.virt.hardware [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 579.451625] env[63538]: DEBUG nova.virt.hardware [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 579.451877] env[63538]: DEBUG nova.virt.hardware [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 579.452096] env[63538]: DEBUG nova.virt.hardware [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 579.453378] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa0cda3-02fc-4750-97f9-5261779c3fc5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.462020] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abece8ba-d294-4aa5-84be-30da4f966994 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.677373] env[63538]: DEBUG oslo_vmware.api [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100336, 'name': PowerOnVM_Task, 'duration_secs': 0.552122} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.677535] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 579.678290] env[63538]: INFO nova.compute.manager [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Took 15.00 seconds to spawn the instance on the hypervisor. [ 579.678704] env[63538]: DEBUG nova.compute.manager [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 579.679929] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eef6afb-8641-4c7f-8077-44ab320670cf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.702732] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528529ba-587c-9620-7125-2e394089de31, 'name': SearchDatastore_Task, 'duration_secs': 0.011089} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.703599] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.703599] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] e1710498-0616-4862-afc0-6e452dc19882/e1710498-0616-4862-afc0-6e452dc19882.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 579.703599] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67d9b996-6dad-44bb-ba22-1637ab9b2fb8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.711828] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Waiting for the task: (returnval){ [ 579.711828] env[63538]: value = "task-5100343" [ 579.711828] env[63538]: _type = "Task" [ 579.711828] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.723340] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100343, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.727221] env[63538]: DEBUG nova.network.neutron [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] [instance: e1710498-0616-4862-afc0-6e452dc19882] Updated VIF entry in instance network info cache for port 9bd783f7-7fb0-4c77-923b-34206070a65c. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 579.727597] env[63538]: DEBUG nova.network.neutron [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] [instance: e1710498-0616-4862-afc0-6e452dc19882] Updating instance_info_cache with network_info: [{"id": "9bd783f7-7fb0-4c77-923b-34206070a65c", "address": "fa:16:3e:35:fa:86", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bd783f7-7f", "ovs_interfaceid": "9bd783f7-7fb0-4c77-923b-34206070a65c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.824597] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100340, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.841914] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.493s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.842548] env[63538]: DEBUG nova.compute.manager [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 579.845183] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.506s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.846640] env[63538]: INFO nova.compute.claims [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.864682] env[63538]: DEBUG nova.network.neutron [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Successfully updated port: 8d80ee33-5e67-4651-a9b1-1f58ca92fb2e {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 579.940708] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100341, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.944674] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100342, 'name': ReconfigVM_Task, 'duration_secs': 0.500922} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.945020] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 4b8fb9ad-a366-423d-81b1-04c5e4ec9264/4b8fb9ad-a366-423d-81b1-04c5e4ec9264.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 579.946587] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-781b7db0-d202-4ce9-aa6e-d2aae92df27a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.956032] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 579.956032] env[63538]: value = "task-5100344" [ 579.956032] env[63538]: _type = "Task" [ 579.956032] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.969914] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100344, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.206590] env[63538]: DEBUG nova.compute.manager [req-7c7e4269-9789-4fd9-9aad-5848871e64eb req-1f60c41a-681d-4d8b-bc47-9e3f855f9454 service nova] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Received event network-vif-plugged-8d80ee33-5e67-4651-a9b1-1f58ca92fb2e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 580.207077] env[63538]: DEBUG oslo_concurrency.lockutils [req-7c7e4269-9789-4fd9-9aad-5848871e64eb req-1f60c41a-681d-4d8b-bc47-9e3f855f9454 service nova] Acquiring lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.207077] env[63538]: DEBUG oslo_concurrency.lockutils [req-7c7e4269-9789-4fd9-9aad-5848871e64eb req-1f60c41a-681d-4d8b-bc47-9e3f855f9454 service nova] Lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.207970] env[63538]: DEBUG oslo_concurrency.lockutils [req-7c7e4269-9789-4fd9-9aad-5848871e64eb req-1f60c41a-681d-4d8b-bc47-9e3f855f9454 service nova] Lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.207970] env[63538]: DEBUG nova.compute.manager [req-7c7e4269-9789-4fd9-9aad-5848871e64eb req-1f60c41a-681d-4d8b-bc47-9e3f855f9454 service nova] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] No waiting events found dispatching network-vif-plugged-8d80ee33-5e67-4651-a9b1-1f58ca92fb2e {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 580.208161] env[63538]: WARNING nova.compute.manager [req-7c7e4269-9789-4fd9-9aad-5848871e64eb req-1f60c41a-681d-4d8b-bc47-9e3f855f9454 service nova] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Received unexpected event network-vif-plugged-8d80ee33-5e67-4651-a9b1-1f58ca92fb2e for instance with vm_state building and task_state spawning. [ 580.211693] env[63538]: INFO nova.compute.manager [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Took 19.87 seconds to build instance. [ 580.226266] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100343, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.230784] env[63538]: DEBUG oslo_concurrency.lockutils [req-d47c0079-db1d-433c-885d-d131bb7d18ff req-b957cc2c-45ba-46d7-bb0b-02bbb53c17a7 service nova] Releasing lock "refresh_cache-e1710498-0616-4862-afc0-6e452dc19882" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.324949] env[63538]: DEBUG oslo_vmware.api [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100340, 'name': PowerOnVM_Task, 'duration_secs': 0.514949} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.325101] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 580.325276] env[63538]: INFO nova.compute.manager [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Took 13.39 seconds to spawn the instance on the hypervisor. [ 580.325745] env[63538]: DEBUG nova.compute.manager [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 580.326646] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b497e8-e890-435e-8c60-42f48ee27d0a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.352260] env[63538]: DEBUG nova.compute.utils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 580.355560] env[63538]: DEBUG nova.compute.manager [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 580.355756] env[63538]: DEBUG nova.network.neutron [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 580.374197] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "refresh_cache-c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.374359] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "refresh_cache-c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.374508] env[63538]: DEBUG nova.network.neutron [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 580.387780] env[63538]: DEBUG nova.network.neutron [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Successfully created port: 5e9b18aa-a332-400c-9c74-aed76633d8b5 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 580.429688] env[63538]: DEBUG oslo_vmware.api [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100341, 'name': PowerOnVM_Task, 'duration_secs': 0.677241} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.429956] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 580.430292] env[63538]: INFO nova.compute.manager [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Took 6.06 seconds to spawn the instance on the hypervisor. 
[ 580.430500] env[63538]: DEBUG nova.compute.manager [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 580.433128] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcda7895-f221-4a63-8cf7-37f94c853a80 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.470266] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100344, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.490035] env[63538]: DEBUG nova.policy [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3cdd111b74fa4b94be6bc6352862c282', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '364d736fc32f4f1caf486e2fa826fa97', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 580.721548] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4d46a63-8300-4408-8003-20195fca89cb tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Lock "174368d1-9910-495b-a923-842e0440fd01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.398s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.722499] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "174368d1-9910-495b-a923-842e0440fd01" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.418s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.722794] env[63538]: INFO nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 174368d1-9910-495b-a923-842e0440fd01] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 580.722899] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "174368d1-9910-495b-a923-842e0440fd01" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.734815] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100343, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570515} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.735086] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] e1710498-0616-4862-afc0-6e452dc19882/e1710498-0616-4862-afc0-6e452dc19882.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 580.736178] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 580.736178] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8afdf6e7-496c-4fdf-8ff2-9237beba564d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.747628] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.747889] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.748196] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Waiting for the task: (returnval){ [ 580.748196] env[63538]: value = "task-5100345" [ 580.748196] env[63538]: _type = "Task" [ 580.748196] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.759160] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100345, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.853669] env[63538]: INFO nova.compute.manager [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Took 20.01 seconds to build instance. [ 580.857628] env[63538]: DEBUG nova.compute.manager [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 580.960036] env[63538]: INFO nova.compute.manager [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Took 18.29 seconds to build instance. [ 580.963035] env[63538]: DEBUG nova.network.neutron [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 580.968292] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100344, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.214193] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b07b0ea-df91-429b-8a7d-dccc8c43fc66 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.224268] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505adc80-e492-4231-9fe7-3ca4477c77cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.230475] env[63538]: DEBUG nova.compute.manager [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 581.269279] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237cc4f6-a6dd-499e-b2eb-7a835fb64d7a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.273599] env[63538]: DEBUG nova.network.neutron [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Successfully created port: 2aa9f276-7b52-46d9-9394-e3be1142ea88 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 581.278907] env[63538]: DEBUG nova.network.neutron [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Updating instance_info_cache with network_info: [{"id": "8d80ee33-5e67-4651-a9b1-1f58ca92fb2e", "address": "fa:16:3e:e1:60:79", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d80ee33-5e", "ovs_interfaceid": "8d80ee33-5e67-4651-a9b1-1f58ca92fb2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.285982] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100345, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062795} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.287513] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f92efdf-8611-4032-90bf-1615dbe0145a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.293330] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 581.293985] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f08771f-9b47-49c0-92c3-3b19b70fb376 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.311477] env[63538]: DEBUG nova.compute.provider_tree [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.336557] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] e1710498-0616-4862-afc0-6e452dc19882/e1710498-0616-4862-afc0-6e452dc19882.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 581.337774] env[63538]: DEBUG nova.scheduler.client.report [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 581.342481] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-011282e6-1c7a-4ff2-9e49-e67806c42b28 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.356068] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d69252f-fdbe-4efb-93b8-f987d80c9192 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Lock "15a8424e-27a6-4b77-b57c-d163345b8fed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.529s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.360424] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock 
"15a8424e-27a6-4b77-b57c-d163345b8fed" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.053s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.360424] env[63538]: INFO nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] During sync_power_state the instance has a pending task (spawning). Skip. [ 581.360424] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "15a8424e-27a6-4b77-b57c-d163345b8fed" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.368694] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Waiting for the task: (returnval){ [ 581.368694] env[63538]: value = "task-5100346" [ 581.368694] env[63538]: _type = "Task" [ 581.368694] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.383227] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100346, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.462654] env[63538]: DEBUG oslo_concurrency.lockutils [None req-79fee368-a1c2-4072-b862-36f98eaf8126 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Lock "d99b7b8e-633f-4fba-bce6-9b8e9e9892d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.803s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.464148] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "d99b7b8e-633f-4fba-bce6-9b8e9e9892d1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.159s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.464255] env[63538]: INFO nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] During sync_power_state the instance has a pending task (spawning). Skip. [ 581.464419] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "d99b7b8e-633f-4fba-bce6-9b8e9e9892d1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.469956] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100344, 'name': Rename_Task, 'duration_secs': 1.219486} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.469956] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 581.470227] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3bda919-24e4-4f5f-94d0-d924a498596a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.477499] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 581.477499] env[63538]: value = "task-5100347" [ 581.477499] env[63538]: _type = "Task" [ 581.477499] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.487763] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100347, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.781711] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.787912] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "refresh_cache-c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.788594] env[63538]: DEBUG nova.compute.manager [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Instance network_info: |[{"id": "8d80ee33-5e67-4651-a9b1-1f58ca92fb2e", "address": "fa:16:3e:e1:60:79", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap8d80ee33-5e", "ovs_interfaceid": "8d80ee33-5e67-4651-a9b1-1f58ca92fb2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 581.790299] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:60:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6edb8eae-1113-49d0-84f7-9fd9f82b26fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d80ee33-5e67-4651-a9b1-1f58ca92fb2e', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 581.806276] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Creating folder: Project (7c1f0c999ede418c866074d9276050ff). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 581.806653] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b81e5fee-9332-4e37-a829-9d646fa73b97 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.824302] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Created folder: Project (7c1f0c999ede418c866074d9276050ff) in parent group-v992234. [ 581.825227] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Creating folder: Instances. Parent ref: group-v992250. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 581.826381] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a562934-03df-4f7b-b37a-e870c9dd87c7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.848141] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Created folder: Instances in parent group-v992250. [ 581.848141] env[63538]: DEBUG oslo.service.loopingcall [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 581.848141] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 581.848141] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-281e158b-c1a1-4bf0-9a83-2abda244a3af {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.877315] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.031s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.877315] env[63538]: DEBUG nova.compute.manager [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 581.885266] env[63538]: DEBUG nova.compute.manager [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 581.888841] env[63538]: DEBUG nova.compute.manager [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 581.892589] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.581s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.894082] env[63538]: INFO nova.compute.claims [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 581.915294] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100346, 'name': ReconfigVM_Task, 'duration_secs': 0.531207} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.916832] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Reconfigured VM instance instance-00000004 to attach disk [datastore1] e1710498-0616-4862-afc0-6e452dc19882/e1710498-0616-4862-afc0-6e452dc19882.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 581.917334] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 581.917334] env[63538]: value = "task-5100350" [ 581.917334] env[63538]: _type = "Task" [ 581.917334] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.917986] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a851dde-3aaf-40a3-b358-c53a7c0663a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.943443] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100350, 'name': CreateVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.944203] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Waiting for the task: (returnval){ [ 581.944203] env[63538]: value = "task-5100351" [ 581.944203] env[63538]: _type = "Task" [ 581.944203] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.955385] env[63538]: DEBUG nova.compute.manager [req-1bd51eae-85ee-4e41-8c72-109be6b7fe8f req-dd6b8b0b-184a-49fb-9836-b87b7fff05c9 service nova] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Received event network-changed-8d80ee33-5e67-4651-a9b1-1f58ca92fb2e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 581.957911] env[63538]: DEBUG nova.compute.manager [req-1bd51eae-85ee-4e41-8c72-109be6b7fe8f req-dd6b8b0b-184a-49fb-9836-b87b7fff05c9 service nova] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Refreshing instance network info cache due to event network-changed-8d80ee33-5e67-4651-a9b1-1f58ca92fb2e. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 581.957911] env[63538]: DEBUG oslo_concurrency.lockutils [req-1bd51eae-85ee-4e41-8c72-109be6b7fe8f req-dd6b8b0b-184a-49fb-9836-b87b7fff05c9 service nova] Acquiring lock "refresh_cache-c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.957911] env[63538]: DEBUG oslo_concurrency.lockutils [req-1bd51eae-85ee-4e41-8c72-109be6b7fe8f req-dd6b8b0b-184a-49fb-9836-b87b7fff05c9 service nova] Acquired lock "refresh_cache-c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.957911] env[63538]: DEBUG nova.network.neutron [req-1bd51eae-85ee-4e41-8c72-109be6b7fe8f req-dd6b8b0b-184a-49fb-9836-b87b7fff05c9 service nova] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Refreshing network info cache for port 8d80ee33-5e67-4651-a9b1-1f58ca92fb2e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 581.974146] env[63538]: DEBUG nova.compute.manager [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 581.984488] env[63538]: DEBUG nova.virt.hardware [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 581.984488] env[63538]: DEBUG nova.virt.hardware [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 581.984488] env[63538]: DEBUG nova.virt.hardware [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 581.984488] env[63538]: DEBUG nova.virt.hardware [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 581.984978] env[63538]: DEBUG nova.virt.hardware [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef 
tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 581.984978] env[63538]: DEBUG nova.virt.hardware [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 581.984978] env[63538]: DEBUG nova.virt.hardware [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 581.984978] env[63538]: DEBUG nova.virt.hardware [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 581.984978] env[63538]: DEBUG nova.virt.hardware [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 581.985237] env[63538]: DEBUG nova.virt.hardware [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 581.985237] env[63538]: DEBUG nova.virt.hardware [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 581.989327] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ceb3dd-31ba-43f6-8cd4-f25bc3215565 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.008625] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0838764c-b807-43c9-ba64-e2f88d7248eb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.014699] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100347, 'name': PowerOnVM_Task} progress is 37%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.397790] env[63538]: DEBUG nova.compute.utils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 582.402639] env[63538]: DEBUG nova.compute.manager [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 582.402639] env[63538]: DEBUG nova.network.neutron [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 582.423185] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.435634] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100350, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.471257] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100351, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.505928] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100347, 'name': PowerOnVM_Task} progress is 91%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.507591] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.625477] env[63538]: DEBUG nova.policy [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c4dcefd65de48a582ffb683637bda94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dc18da1ea704eeaaeb62633c4f76ee8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 582.665819] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Acquiring lock "102c0463-fb64-4dda-914c-b98c8e9991ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.665819] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Lock "102c0463-fb64-4dda-914c-b98c8e9991ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.907043] env[63538]: DEBUG nova.compute.manager [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 582.939029] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100350, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.969935] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100351, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.012101] env[63538]: DEBUG oslo_vmware.api [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100347, 'name': PowerOnVM_Task, 'duration_secs': 1.042788} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.012101] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 583.012633] env[63538]: INFO nova.compute.manager [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Took 13.68 seconds to spawn the instance on the hypervisor. [ 583.013284] env[63538]: DEBUG nova.compute.manager [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 583.014274] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc65c5a-964a-43bd-ac13-ddfc68d6a366 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.228020] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75062f1-748a-49a7-9f7d-a2294a1814ba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.237548] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b082899a-6fb9-44d8-bf03-ce74bb76d7e7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.281312] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4632d987-08a9-4824-a0c4-94794ce7dea6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.290709] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf46c66-dade-4d21-9fa3-6cd3031f64dd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.309684] env[63538]: DEBUG nova.compute.provider_tree [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.438783] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100350, 'name': CreateVM_Task, 'duration_secs': 1.461885} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.438927] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 583.441199] env[63538]: DEBUG oslo_vmware.service [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7753f41c-6e20-49d3-b093-da9d790f4549 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.454399] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.454581] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.455074] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 583.455453] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b3a1170-cc8a-45eb-97b9-fb187b948736 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.469271] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 583.469271] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ec6c84-5232-d930-c8eb-20a54158a5d2" [ 583.469271] env[63538]: _type = "Task" [ 583.469271] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.474383] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100351, 'name': Rename_Task, 'duration_secs': 1.123264} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.479040] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 583.479040] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2ced5db-9d46-4f53-b91d-fa83eb89e635 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.488837] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ec6c84-5232-d930-c8eb-20a54158a5d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.490934] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Waiting for the task: (returnval){ [ 583.490934] env[63538]: value = "task-5100352" [ 583.490934] env[63538]: _type = "Task" [ 583.490934] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.501432] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100352, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.540287] env[63538]: INFO nova.compute.manager [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Took 21.81 seconds to build instance. 
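
Nearly every vCenter interaction in this trace follows the same shape: a *_Task method is invoked (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task), wait_for_task logs "Waiting for the task", and _poll_task then reports "progress is N%" until the task is "completed successfully" with a duration_secs. The snippet below is only a minimal sketch of that poll-until-complete loop, not the oslo.vmware implementation; get_task_info is a hypothetical callable standing in for the property-collector read and is assumed to return an object with state, progress and error attributes.

import time


def wait_until_complete(get_task_info, task_ref, interval=0.5):
    """Poll a vCenter task reference until it succeeds or fails."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)  # hypothetical helper, see note above
        if info.state == "running":
            # Corresponds to the "progress is N%" debug lines in the log.
            print(f"Task {task_ref}: progress is {info.progress}%")
        elif info.state == "success":
            duration = time.monotonic() - start
            print(f"Task {task_ref} completed successfully "
                  f"(duration_secs: {duration:.6f})")
            return info
        else:
            # An "error" state (or a cancelled task) ends the wait.
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        time.sleep(interval)

In the log this pattern shows up as a task id such as task-5100350 being polled at api.py:434 and reported complete at api.py:444.
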
[ 583.815083] env[63538]: DEBUG nova.scheduler.client.report [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 583.830799] env[63538]: DEBUG nova.network.neutron [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Successfully created port: 47d19b83-6292-46e2-835f-1198ef52374c {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 583.920748] env[63538]: DEBUG nova.compute.manager [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 583.968492] env[63538]: DEBUG nova.virt.hardware [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 583.969602] env[63538]: DEBUG nova.virt.hardware [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 583.969602] env[63538]: DEBUG nova.virt.hardware [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 583.971531] env[63538]: DEBUG nova.virt.hardware [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 583.971531] env[63538]: DEBUG nova.virt.hardware [None 
req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 583.971531] env[63538]: DEBUG nova.virt.hardware [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 583.971531] env[63538]: DEBUG nova.virt.hardware [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 583.971531] env[63538]: DEBUG nova.virt.hardware [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 583.971759] env[63538]: DEBUG nova.virt.hardware [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 583.972272] env[63538]: DEBUG nova.virt.hardware [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 583.972272] env[63538]: DEBUG nova.virt.hardware [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 583.973810] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0dbeb3-19e8-4e3a-9e52-284a900b7c73 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.003963] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb64dc6e-8da8-4363-a4e4-69d763c5e69f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.010912] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.010912] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Processing image 
faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 584.011213] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.011213] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.011386] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 584.012503] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e105abc-118b-46e0-9dd1-534d9f969863 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.028020] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100352, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.037425] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 584.037586] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 584.038648] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4819a264-cb1b-4f7f-8e09-408dcb73819f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.043463] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e298aae8-c14a-41a5-bb96-ea19882b6997 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.328s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.044880] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.740s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.045257] env[63538]: INFO nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] During sync_power_state the instance has a pending task (spawning). Skip. [ 584.045445] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.050594] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6c9c85e-2e85-4f6f-aef4-c2c302ec4258 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.058708] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 584.058708] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bef96f-b671-744c-72e7-41ed75a3edf8" [ 584.058708] env[63538]: _type = "Task" [ 584.058708] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.070763] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bef96f-b671-744c-72e7-41ed75a3edf8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.212622] env[63538]: DEBUG nova.network.neutron [req-1bd51eae-85ee-4e41-8c72-109be6b7fe8f req-dd6b8b0b-184a-49fb-9836-b87b7fff05c9 service nova] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Updated VIF entry in instance network info cache for port 8d80ee33-5e67-4651-a9b1-1f58ca92fb2e. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 584.212772] env[63538]: DEBUG nova.network.neutron [req-1bd51eae-85ee-4e41-8c72-109be6b7fe8f req-dd6b8b0b-184a-49fb-9836-b87b7fff05c9 service nova] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Updating instance_info_cache with network_info: [{"id": "8d80ee33-5e67-4651-a9b1-1f58ca92fb2e", "address": "fa:16:3e:e1:60:79", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d80ee33-5e", "ovs_interfaceid": "8d80ee33-5e67-4651-a9b1-1f58ca92fb2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.320948] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.321553] env[63538]: DEBUG nova.compute.manager [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 584.325352] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 7.508s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.325545] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.325702] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63538) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 584.326384] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.546s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.328361] env[63538]: INFO nova.compute.claims [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 584.332234] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3a655b-d0b8-40af-a622-8d06db1ec2f0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.344063] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a30ba11-115b-4d78-945c-8332d5ca97e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.364266] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d73380-175a-47fb-98fe-6c21bb07f13c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.377036] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e4dab4-62d9-4d71-9625-f59d217702f1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.423039] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180528MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=63538) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 584.423187] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63538) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.457674] env[63538]: DEBUG nova.network.neutron [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Successfully updated port: 2aa9f276-7b52-46d9-9394-e3be1142ea88 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 584.510031] env[63538]: DEBUG oslo_vmware.api [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100352, 'name': PowerOnVM_Task, 'duration_secs': 0.905108} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.510031] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 584.510031] env[63538]: INFO nova.compute.manager [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Took 12.59 seconds to spawn the instance on the hypervisor. [ 584.510252] env[63538]: DEBUG nova.compute.manager [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 584.511133] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c3d4817-bd53-47c4-965a-49eb79c517e5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.547461] env[63538]: DEBUG nova.compute.manager [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 584.578813] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Preparing fetch location {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 584.579111] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Creating directory with path [datastore2] vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc/faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 584.579380] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47523359-974d-49bc-ace8-46f7a47cec98 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.597068] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Created directory with path [datastore2] vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc/faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 584.597068] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Fetch image to [datastore2] vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 584.597068] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Downloading image file data faabbca4-e27b-433a-b93d-f059fd73bc92 to [datastore2] vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk on the data store datastore2 {{(pid=63538) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 584.598303] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0803e400-34a0-41e9-b405-7349ab1bad31 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.607919] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3778796-66e3-49ca-b5c1-9a9c0ef3a342 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.621555] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25bc165-8dce-4cf9-8517-e41f77184f12 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.657806] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632689fc-6567-4074-b48a-1898019fefc2 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.666253] env[63538]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7669f1f3-eb23-41d5-992f-ec03a8833e2e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.694803] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Downloading image file data faabbca4-e27b-433a-b93d-f059fd73bc92 to the data store datastore2 {{(pid=63538) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 584.717338] env[63538]: DEBUG oslo_concurrency.lockutils [req-1bd51eae-85ee-4e41-8c72-109be6b7fe8f req-dd6b8b0b-184a-49fb-9836-b87b7fff05c9 service nova] Releasing lock "refresh_cache-c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.765578] env[63538]: DEBUG oslo_vmware.rw_handles [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=63538) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 584.826320] env[63538]: DEBUG nova.network.neutron [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Successfully updated port: 5e9b18aa-a332-400c-9c74-aed76633d8b5 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 584.836510] env[63538]: DEBUG nova.compute.utils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 584.837766] env[63538]: DEBUG nova.compute.manager [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 584.837766] env[63538]: DEBUG nova.network.neutron [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 584.960854] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Acquiring lock "refresh_cache-e3ba860b-afb8-4843-9d99-049dce205f9f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.961211] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Acquired lock "refresh_cache-e3ba860b-afb8-4843-9d99-049dce205f9f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.961269] env[63538]: DEBUG nova.network.neutron [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 585.038356] env[63538]: INFO nova.compute.manager [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Took 23.01 seconds to build instance. 
[ 585.078166] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.174228] env[63538]: DEBUG nova.policy [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f91d4ce5a5724fb7b785591ae831506d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c1f0c999ede418c866074d9276050ff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 585.332347] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Acquiring lock "refresh_cache-0a7c34e0-1acc-4761-804a-eb9ee00fdd77" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.332347] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Acquired lock "refresh_cache-0a7c34e0-1acc-4761-804a-eb9ee00fdd77" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.332347] env[63538]: DEBUG nova.network.neutron [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 585.349787] env[63538]: DEBUG nova.compute.manager [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 585.538124] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8d14d533-e264-43c7-bee2-6651013be34a tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Lock "e1710498-0616-4862-afc0-6e452dc19882" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.521s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.543031] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "e1710498-0616-4862-afc0-6e452dc19882" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 9.235s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.543031] env[63538]: INFO nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e1710498-0616-4862-afc0-6e452dc19882] During sync_power_state the instance has a pending task (spawning). Skip. [ 585.543031] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "e1710498-0616-4862-afc0-6e452dc19882" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.573904] env[63538]: DEBUG nova.network.neutron [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 585.670289] env[63538]: DEBUG oslo_vmware.rw_handles [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Completed reading data from the image iterator. {{(pid=63538) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 585.670289] env[63538]: DEBUG oslo_vmware.rw_handles [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2.
{{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 585.716426] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db07585-8226-496b-a091-03dec1e01f01 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.726178] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b59f97c-6131-45f3-abcd-0bfb08e44570 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.770595] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eecfd7e-462e-4876-a954-8fca102d2e20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.779699] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee899ff4-e9b2-4429-b342-b4bd92129f25 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.785783] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Downloaded image file data faabbca4-e27b-433a-b93d-f059fd73bc92 to vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk on the data store datastore2 {{(pid=63538) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 585.787804] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Caching image {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 585.788201] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copying Virtual Disk [datastore2] vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk to [datastore2] vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 585.788731] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6135b1a8-a077-48e1-8bef-6a81ec1314a7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.804842] env[63538]: DEBUG nova.compute.provider_tree [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.812585] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 585.812585] env[63538]: value = "task-5100353" [ 
585.812585] env[63538]: _type = "Task" [ 585.812585] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.828697] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100353, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.016243] env[63538]: DEBUG nova.network.neutron [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.029952] env[63538]: DEBUG nova.network.neutron [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Updating instance_info_cache with network_info: [{"id": "2aa9f276-7b52-46d9-9394-e3be1142ea88", "address": "fa:16:3e:e9:47:8f", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2aa9f276-7b", "ovs_interfaceid": "2aa9f276-7b52-46d9-9394-e3be1142ea88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.041389] env[63538]: DEBUG nova.compute.manager [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 586.308147] env[63538]: DEBUG nova.scheduler.client.report [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 586.325528] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100353, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.368285] env[63538]: DEBUG nova.compute.manager [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 586.400618] env[63538]: DEBUG nova.virt.hardware [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=<?>,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-12T12:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 586.400978] env[63538]: DEBUG nova.virt.hardware [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 586.401163] env[63538]: DEBUG nova.virt.hardware [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 586.401462] env[63538]: DEBUG nova.virt.hardware [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 586.401935] env[63538]:
DEBUG nova.virt.hardware [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 586.402215] env[63538]: DEBUG nova.virt.hardware [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 586.402834] env[63538]: DEBUG nova.virt.hardware [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 586.403181] env[63538]: DEBUG nova.virt.hardware [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 586.403251] env[63538]: DEBUG nova.virt.hardware [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 586.403426] env[63538]: DEBUG nova.virt.hardware [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 586.403733] env[63538]: DEBUG nova.virt.hardware [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 586.404506] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5783ef-80cc-4004-9799-6b4489abade2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.417970] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adc3274-76b3-4c7b-bf33-43314b1cf260 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.449985] env[63538]: DEBUG nova.compute.manager [req-3737a801-da84-478e-8884-a9d311f5cd59 req-35d4689f-9aaa-4751-84ee-f265f2673848 service nova] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Received event network-vif-plugged-2aa9f276-7b52-46d9-9394-e3be1142ea88 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 586.450240] env[63538]: DEBUG oslo_concurrency.lockutils [req-3737a801-da84-478e-8884-a9d311f5cd59 req-35d4689f-9aaa-4751-84ee-f265f2673848 service nova] Acquiring lock "e3ba860b-afb8-4843-9d99-049dce205f9f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63538) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.450456] env[63538]: DEBUG oslo_concurrency.lockutils [req-3737a801-da84-478e-8884-a9d311f5cd59 req-35d4689f-9aaa-4751-84ee-f265f2673848 service nova] Lock "e3ba860b-afb8-4843-9d99-049dce205f9f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.450622] env[63538]: DEBUG oslo_concurrency.lockutils [req-3737a801-da84-478e-8884-a9d311f5cd59 req-35d4689f-9aaa-4751-84ee-f265f2673848 service nova] Lock "e3ba860b-afb8-4843-9d99-049dce205f9f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.451054] env[63538]: DEBUG nova.compute.manager [req-3737a801-da84-478e-8884-a9d311f5cd59 req-35d4689f-9aaa-4751-84ee-f265f2673848 service nova] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] No waiting events found dispatching network-vif-plugged-2aa9f276-7b52-46d9-9394-e3be1142ea88 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 586.451054] env[63538]: WARNING nova.compute.manager [req-3737a801-da84-478e-8884-a9d311f5cd59 req-35d4689f-9aaa-4751-84ee-f265f2673848 service nova] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Received unexpected event network-vif-plugged-2aa9f276-7b52-46d9-9394-e3be1142ea88 for instance with vm_state building and task_state spawning. [ 586.533953] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Releasing lock "refresh_cache-e3ba860b-afb8-4843-9d99-049dce205f9f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.533953] env[63538]: DEBUG nova.compute.manager [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Instance network_info: |[{"id": "2aa9f276-7b52-46d9-9394-e3be1142ea88", "address": "fa:16:3e:e9:47:8f", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2aa9f276-7b", "ovs_interfaceid": "2aa9f276-7b52-46d9-9394-e3be1142ea88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 586.535649] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None
req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:47:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2aa9f276-7b52-46d9-9394-e3be1142ea88', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 586.549182] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Creating folder: Project (364d736fc32f4f1caf486e2fa826fa97). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 586.552689] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a9900b9-938b-4446-839f-7641a1301e61 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.579929] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Created folder: Project (364d736fc32f4f1caf486e2fa826fa97) in parent group-v992234. [ 586.579929] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Creating folder: Instances. Parent ref: group-v992253. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 586.580889] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62fa8af7-d601-494e-8e2f-ddb9485191fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.590517] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.593839] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Created folder: Instances in parent group-v992253. [ 586.593956] env[63538]: DEBUG oslo.service.loopingcall [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.594222] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 586.594425] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0752d0ab-0827-462a-91d6-d7a525dc3d68 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.618584] env[63538]: DEBUG nova.compute.manager [None req-0895ba55-6994-487e-8e36-5c372f26dffb tempest-ServerDiagnosticsV248Test-2070800695 tempest-ServerDiagnosticsV248Test-2070800695-project-admin] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 586.620915] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1043c4-5a6c-4346-9027-fa7bb53f56fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.630498] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 586.630498] env[63538]: value = "task-5100356" [ 586.630498] env[63538]: _type = "Task" [ 586.630498] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.637214] env[63538]: INFO nova.compute.manager [None req-0895ba55-6994-487e-8e36-5c372f26dffb tempest-ServerDiagnosticsV248Test-2070800695 tempest-ServerDiagnosticsV248Test-2070800695-project-admin] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Retrieving diagnostics [ 586.638574] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28440fe0-46e8-4eac-adb0-8bafe83dc138 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.646199] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100356, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.743688] env[63538]: DEBUG nova.network.neutron [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Successfully created port: 5b823d56-3162-4875-a202-4526e8d9e433 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 586.759241] env[63538]: DEBUG nova.network.neutron [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Updating instance_info_cache with network_info: [{"id": "5e9b18aa-a332-400c-9c74-aed76633d8b5", "address": "fa:16:3e:93:27:04", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.53", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e9b18aa-a3", "ovs_interfaceid": "5e9b18aa-a332-400c-9c74-aed76633d8b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.814046] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.814577] env[63538]: DEBUG nova.compute.manager [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 586.817468] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.395s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.819602] env[63538]: INFO nova.compute.claims [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.833814] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100353, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.143925] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100356, 'name': CreateVM_Task, 'duration_secs': 0.468057} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.144316] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 587.146032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.146032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.146032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 587.149183] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a00c9156-fae5-4e82-9b2b-cba8cb4d1cc8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.155618] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Waiting for the task: (returnval){ [ 587.155618] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523e0694-1bc9-0eb0-0c7f-8e49469970a4" [ 587.155618] env[63538]: _type = 
"Task" [ 587.155618] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.172847] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523e0694-1bc9-0eb0-0c7f-8e49469970a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.259778] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Releasing lock "refresh_cache-0a7c34e0-1acc-4761-804a-eb9ee00fdd77" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.261135] env[63538]: DEBUG nova.compute.manager [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Instance network_info: |[{"id": "5e9b18aa-a332-400c-9c74-aed76633d8b5", "address": "fa:16:3e:93:27:04", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.53", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e9b18aa-a3", "ovs_interfaceid": "5e9b18aa-a332-400c-9c74-aed76633d8b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 587.261334] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:27:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e9b18aa-a332-400c-9c74-aed76633d8b5', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 587.276368] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Creating folder: Project (64606c99619d48e1a165d3ab701caf7a). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 587.276812] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa817b74-3d59-47fb-bb4e-95a55e4e4db1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.291533] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Created folder: Project (64606c99619d48e1a165d3ab701caf7a) in parent group-v992234. [ 587.292482] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Creating folder: Instances. Parent ref: group-v992256. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 587.293052] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5aa32d20-3b70-47e2-aa6d-680184dc5688 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.308323] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Created folder: Instances in parent group-v992256. [ 587.308391] env[63538]: DEBUG oslo.service.loopingcall [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 587.308564] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 587.309030] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53fdbc74-d6bb-4230-846a-f141fbde2ac6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.337457] env[63538]: DEBUG nova.compute.utils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 587.346967] env[63538]: DEBUG nova.compute.manager [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 587.347272] env[63538]: DEBUG nova.network.neutron [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 587.358493] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100353, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.305747} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.363850] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copied Virtual Disk [datastore2] vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk to [datastore2] vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 587.364139] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleting the datastore file [datastore2] vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc/faabbca4-e27b-433a-b93d-f059fd73bc92/tmp-sparse.vmdk {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 587.366030] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 587.366030] env[63538]: value = "task-5100359" [ 587.366030] env[63538]: _type = "Task" [ 587.366030] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.366030] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5481946b-81e1-4968-958f-a9b07c9d31c7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.380088] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100359, 'name': CreateVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.382228] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 587.382228] env[63538]: value = "task-5100360" [ 587.382228] env[63538]: _type = "Task" [ 587.382228] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.394890] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100360, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.456021] env[63538]: DEBUG nova.compute.manager [req-17f16b4b-a5ba-4d91-8ade-98c9583c2a21 req-07faa6c4-b814-4de9-8f62-1ecf41f1b9b2 service nova] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Received event network-vif-plugged-5e9b18aa-a332-400c-9c74-aed76633d8b5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 587.456510] env[63538]: DEBUG oslo_concurrency.lockutils [req-17f16b4b-a5ba-4d91-8ade-98c9583c2a21 req-07faa6c4-b814-4de9-8f62-1ecf41f1b9b2 service nova] Acquiring lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.456875] env[63538]: DEBUG oslo_concurrency.lockutils [req-17f16b4b-a5ba-4d91-8ade-98c9583c2a21 req-07faa6c4-b814-4de9-8f62-1ecf41f1b9b2 service nova] Lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.457474] env[63538]: DEBUG oslo_concurrency.lockutils [req-17f16b4b-a5ba-4d91-8ade-98c9583c2a21 req-07faa6c4-b814-4de9-8f62-1ecf41f1b9b2 service nova] Lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.457773] env[63538]: DEBUG nova.compute.manager [req-17f16b4b-a5ba-4d91-8ade-98c9583c2a21 req-07faa6c4-b814-4de9-8f62-1ecf41f1b9b2 service nova] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] No waiting events found dispatching network-vif-plugged-5e9b18aa-a332-400c-9c74-aed76633d8b5 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 587.460262] env[63538]: WARNING nova.compute.manager [req-17f16b4b-a5ba-4d91-8ade-98c9583c2a21 req-07faa6c4-b814-4de9-8f62-1ecf41f1b9b2 service nova] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Received unexpected event network-vif-plugged-5e9b18aa-a332-400c-9c74-aed76633d8b5 for instance with vm_state building and task_state spawning.
[ 587.560579] env[63538]: DEBUG nova.policy [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e81b8e6639bb436ca91e68b2e7248f92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c5e6ed681ed4078bd9115b30f419d9a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 587.622248] env[63538]: DEBUG nova.compute.manager [req-8a38f5ea-b2ee-4b97-91a9-5a237a9fba5d req-e845cb34-1955-4d2e-a6be-e72b19715d93 service nova] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Received event network-changed-2aa9f276-7b52-46d9-9394-e3be1142ea88 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 587.625481] env[63538]: DEBUG nova.compute.manager [req-8a38f5ea-b2ee-4b97-91a9-5a237a9fba5d req-e845cb34-1955-4d2e-a6be-e72b19715d93 service nova] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Refreshing instance network info cache due to event network-changed-2aa9f276-7b52-46d9-9394-e3be1142ea88. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 587.625481] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a38f5ea-b2ee-4b97-91a9-5a237a9fba5d req-e845cb34-1955-4d2e-a6be-e72b19715d93 service nova] Acquiring lock "refresh_cache-e3ba860b-afb8-4843-9d99-049dce205f9f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.625481] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a38f5ea-b2ee-4b97-91a9-5a237a9fba5d req-e845cb34-1955-4d2e-a6be-e72b19715d93 service nova] Acquired lock "refresh_cache-e3ba860b-afb8-4843-9d99-049dce205f9f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.625481] env[63538]: DEBUG nova.network.neutron [req-8a38f5ea-b2ee-4b97-91a9-5a237a9fba5d req-e845cb34-1955-4d2e-a6be-e72b19715d93 service nova] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Refreshing network info cache for port 2aa9f276-7b52-46d9-9394-e3be1142ea88 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 587.669785] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.673670] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 587.673670] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.853290] env[63538]: DEBUG nova.compute.manager [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 587.885046] env[63538]: DEBUG nova.network.neutron [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Successfully updated port: 47d19b83-6292-46e2-835f-1198ef52374c {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 587.905934] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100360, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034494} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.908403] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 587.908702] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Moving file from [datastore2] vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc/faabbca4-e27b-433a-b93d-f059fd73bc92 to [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92. {{(pid=63538) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 587.908982] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100359, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.912653] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-df726ee1-6eb2-49c7-8889-96b3e4b4ee22 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.921570] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 587.921570] env[63538]: value = "task-5100361" [ 587.921570] env[63538]: _type = "Task" [ 587.921570] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.932431] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100361, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.178025] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e171514-52e5-40cc-aa84-a80c3a7f97c3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.190069] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa5dd95-9a03-42d4-bf27-dcb2b8594369 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.228918] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789d05b9-3203-437d-8292-1a58be8af0cf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.240724] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b0a03e-ce0a-4377-a0d7-7cf332f7b633 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.258582] env[63538]: DEBUG nova.compute.provider_tree [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 588.385907] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100359, 'name': CreateVM_Task, 'duration_secs': 0.559471} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.385907] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 588.385907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.385907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.385907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 588.385907] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea910135-5c4c-4540-a3c1-d4ea589f0bab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.397019] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.397019] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.397019] env[63538]: DEBUG nova.network.neutron [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 588.402419] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Waiting for the task: (returnval){ [ 588.402419] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529326fa-c3fa-a2bb-48a3-6711708f331e" [ 588.402419] env[63538]: _type = "Task" [ 588.402419] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.412427] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529326fa-c3fa-a2bb-48a3-6711708f331e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.435147] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100361, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.043202} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.435261] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] File moved {{(pid=63538) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 588.435420] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Cleaning up location [datastore2] vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 588.435573] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleting the datastore file [datastore2] vmware_temp/c70fefcd-00ed-4e7c-9304-6e022759d1cc {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 588.436283] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1531f8b9-3475-482b-9ffc-9241a8e76080 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.443380] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 588.443380] env[63538]: value = "task-5100362" [ 588.443380] env[63538]: _type = "Task" [ 588.443380] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.457491] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100362, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.672066] env[63538]: DEBUG nova.network.neutron [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Successfully created port: a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 588.800226] env[63538]: ERROR nova.scheduler.client.report [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [req-5bd409f3-d655-4bdb-9c3b-a274e323fe7c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5bd409f3-d655-4bdb-9c3b-a274e323fe7c"}]} [ 588.841029] env[63538]: DEBUG nova.scheduler.client.report [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 588.866543] env[63538]: DEBUG nova.compute.manager [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 588.871292] env[63538]: DEBUG nova.scheduler.client.report [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 588.872177] env[63538]: DEBUG nova.compute.provider_tree [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 588.892533] env[63538]: DEBUG nova.scheduler.client.report [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 588.917011] env[63538]: DEBUG nova.virt.hardware [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 588.917302] env[63538]: DEBUG nova.virt.hardware [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 588.917418] env[63538]: DEBUG nova.virt.hardware [None req-b9b90b69-0813-47c2-8b80-6761383f3644 
tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 588.917688] env[63538]: DEBUG nova.virt.hardware [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 588.917848] env[63538]: DEBUG nova.virt.hardware [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 588.918264] env[63538]: DEBUG nova.virt.hardware [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 588.918264] env[63538]: DEBUG nova.virt.hardware [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 588.918412] env[63538]: DEBUG nova.virt.hardware [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 588.918677] env[63538]: DEBUG nova.virt.hardware [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 588.918888] env[63538]: DEBUG nova.virt.hardware [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 588.919049] env[63538]: DEBUG nova.virt.hardware [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 588.920330] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f517b3-09df-4c4c-b38d-97aa166b68c6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.938726] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4b92b6-c121-4f69-965f-3a9a0a79d37b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.944927] env[63538]: DEBUG oslo_vmware.api [None 
req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529326fa-c3fa-a2bb-48a3-6711708f331e, 'name': SearchDatastore_Task, 'duration_secs': 0.043086} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.946246] env[63538]: DEBUG nova.scheduler.client.report [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 588.950403] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.950849] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 588.950936] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.983317] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100362, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105248} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.983317] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 588.983317] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f959944-9176-4676-b4ac-9847444058e5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.996424] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 588.996424] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524097d6-f34e-244d-4b5d-62e9c1f2198b" [ 588.996424] env[63538]: _type = "Task" [ 588.996424] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.007631] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524097d6-f34e-244d-4b5d-62e9c1f2198b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.171599] env[63538]: DEBUG nova.network.neutron [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 589.382314] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cccbd45-91b6-4074-b33d-3df561d70c1f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.393460] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b2095b-4322-4fed-8d78-850b484db9ff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.435234] env[63538]: DEBUG nova.network.neutron [req-8a38f5ea-b2ee-4b97-91a9-5a237a9fba5d req-e845cb34-1955-4d2e-a6be-e72b19715d93 service nova] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Updated VIF entry in instance network info cache for port 2aa9f276-7b52-46d9-9394-e3be1142ea88. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 589.438030] env[63538]: DEBUG nova.network.neutron [req-8a38f5ea-b2ee-4b97-91a9-5a237a9fba5d req-e845cb34-1955-4d2e-a6be-e72b19715d93 service nova] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Updating instance_info_cache with network_info: [{"id": "2aa9f276-7b52-46d9-9394-e3be1142ea88", "address": "fa:16:3e:e9:47:8f", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2aa9f276-7b", "ovs_interfaceid": "2aa9f276-7b52-46d9-9394-e3be1142ea88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.442643] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d86f96-57e4-4d6e-be4a-ed990d34ec69 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.455616] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3951fb0-06a5-4b23-a1b0-64cc5b65b6ba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.478320] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Acquiring lock "15a8424e-27a6-4b77-b57c-d163345b8fed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.478320] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Lock "15a8424e-27a6-4b77-b57c-d163345b8fed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.478320] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Acquiring lock "15a8424e-27a6-4b77-b57c-d163345b8fed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.478320] env[63538]: DEBUG 
oslo_concurrency.lockutils [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Lock "15a8424e-27a6-4b77-b57c-d163345b8fed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.478631] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Lock "15a8424e-27a6-4b77-b57c-d163345b8fed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.480318] env[63538]: DEBUG nova.compute.provider_tree [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 589.481611] env[63538]: INFO nova.compute.manager [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Terminating instance [ 589.484176] env[63538]: DEBUG nova.compute.manager [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 589.484176] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 589.485073] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a456bca-d47e-4d00-927d-e31c195928ff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.493981] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 589.494243] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-849fc1d1-efee-483a-b058-0f6d98bdcaf5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.505206] env[63538]: DEBUG oslo_vmware.api [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Waiting for the task: (returnval){ [ 589.505206] env[63538]: value = "task-5100363" [ 589.505206] env[63538]: _type = "Task" [ 589.505206] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.514039] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524097d6-f34e-244d-4b5d-62e9c1f2198b, 'name': SearchDatastore_Task, 'duration_secs': 0.037557} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.514888] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.515092] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf/c8a02fa6-5232-4dde-b6dd-0da1089b6bbf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 589.515352] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.515535] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 589.515743] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f187f5b7-e13b-4906-84aa-89866cb24db6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.523366] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5436bc36-135f-45de-b597-9ecde77efe1c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.523869] env[63538]: DEBUG oslo_vmware.api [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100363, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.530639] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 589.530639] env[63538]: value = "task-5100364" [ 589.530639] env[63538]: _type = "Task" [ 589.530639] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.535895] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 589.536102] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 589.537349] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54203412-82f5-4851-b303-8889d465aac3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.544355] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100364, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.548545] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Waiting for the task: (returnval){ [ 589.548545] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5227892e-51ad-3cba-a61e-50918b064368" [ 589.548545] env[63538]: _type = "Task" [ 589.548545] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.561810] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5227892e-51ad-3cba-a61e-50918b064368, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.890271] env[63538]: DEBUG nova.network.neutron [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Successfully updated port: 5b823d56-3162-4875-a202-4526e8d9e433 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 589.948978] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a38f5ea-b2ee-4b97-91a9-5a237a9fba5d req-e845cb34-1955-4d2e-a6be-e72b19715d93 service nova] Releasing lock "refresh_cache-e3ba860b-afb8-4843-9d99-049dce205f9f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.024280] env[63538]: DEBUG oslo_vmware.api [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100363, 'name': PowerOffVM_Task, 'duration_secs': 0.362074} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.024614] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 590.024786] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 590.025074] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-353a647a-9950-4857-a89a-abc0d0e6f90f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.045400] env[63538]: DEBUG nova.scheduler.client.report [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 20 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 590.046234] env[63538]: DEBUG nova.compute.provider_tree [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 20 to 21 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 590.046552] env[63538]: DEBUG nova.compute.provider_tree [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 590.051535] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100364, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.062761] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5227892e-51ad-3cba-a61e-50918b064368, 'name': SearchDatastore_Task, 'duration_secs': 0.012087} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.069696] env[63538]: DEBUG nova.network.neutron [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance_info_cache with network_info: [{"id": "47d19b83-6292-46e2-835f-1198ef52374c", "address": "fa:16:3e:af:6f:01", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d19b83-62", "ovs_interfaceid": "47d19b83-6292-46e2-835f-1198ef52374c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.069696] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e791366e-72db-4d12-8607-9c64c9f6072d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.078569] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Waiting for the task: (returnval){ [ 590.078569] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f19c47-891c-361d-68f3-22aa9525601a" [ 590.078569] env[63538]: _type = "Task" [ 590.078569] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.090468] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f19c47-891c-361d-68f3-22aa9525601a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.132070] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 590.132337] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 590.135088] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Deleting the datastore file [datastore1] 15a8424e-27a6-4b77-b57c-d163345b8fed {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 590.135088] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d1dc7c5-69b3-4476-af95-61775bd530c9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.148726] env[63538]: DEBUG oslo_vmware.api [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Waiting for the task: (returnval){ [ 590.148726] env[63538]: value = "task-5100366" [ 590.148726] env[63538]: _type = "Task" [ 590.148726] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.397468] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "refresh_cache-8fb62f47-cbf2-4b46-bc33-845e832f9ef0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.397468] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "refresh_cache-8fb62f47-cbf2-4b46-bc33-845e832f9ef0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.397468] env[63538]: DEBUG nova.network.neutron [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 590.557634] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.740s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.558465] env[63538]: DEBUG nova.compute.manager [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 590.563375] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100364, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.720405} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.563965] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.057s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.567377] env[63538]: INFO nova.compute.claims [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.572046] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf/c8a02fa6-5232-4dde-b6dd-0da1089b6bbf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 590.576508] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 590.577720] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.578175] env[63538]: DEBUG nova.compute.manager [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Instance network_info: |[{"id": "47d19b83-6292-46e2-835f-1198ef52374c", "address": "fa:16:3e:af:6f:01", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d19b83-62", "ovs_interfaceid": "47d19b83-6292-46e2-835f-1198ef52374c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 590.578684] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76b80835-bff6-438c-8fc4-083f5ddac952 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.583259] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:6f:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47d19b83-6292-46e2-835f-1198ef52374c', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 590.601522] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Creating folder: Project (3dc18da1ea704eeaaeb62633c4f76ee8). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 590.602379] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c4ebdeb-afd2-44ab-af9f-7a52dcd66d57 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.623044] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f19c47-891c-361d-68f3-22aa9525601a, 'name': SearchDatastore_Task, 'duration_secs': 0.071197} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.625465] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.625980] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] e3ba860b-afb8-4843-9d99-049dce205f9f/e3ba860b-afb8-4843-9d99-049dce205f9f.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 590.626565] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 590.626565] env[63538]: value = "task-5100368" [ 590.626565] env[63538]: _type = "Task" [ 590.626565] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.628961] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.629321] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 590.629839] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af690f2d-83cf-4834-88a4-5c59fe1faa1b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.634363] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Created folder: Project (3dc18da1ea704eeaaeb62633c4f76ee8) in parent group-v992234. [ 590.634540] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Creating folder: Instances. Parent ref: group-v992259. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 590.635038] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-544f8093-8755-40e3-bb4f-ff960468e74f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.644021] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da31164f-e135-457a-a3b3-9693dbe6ff6a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.665483] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100368, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.665658] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Waiting for the task: (returnval){ [ 590.665658] env[63538]: value = "task-5100369" [ 590.665658] env[63538]: _type = "Task" [ 590.665658] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.671353] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 590.671608] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 590.679739] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e4dea0b-8587-4c57-a314-4b7e15453cb5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.686785] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Created folder: Instances in parent group-v992259. [ 590.687252] env[63538]: DEBUG oslo.service.loopingcall [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 590.688358] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 590.694732] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-868d8fe2-f86a-4001-a582-2f1a05805c78 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.718087] env[63538]: DEBUG oslo_vmware.api [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Task: {'id': task-5100366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.284263} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.722881] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 590.723105] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 590.723315] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 590.723498] env[63538]: INFO nova.compute.manager [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Took 1.24 seconds to destroy the instance on the hypervisor. [ 590.723649] env[63538]: DEBUG oslo.service.loopingcall [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 590.723933] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Waiting for the task: (returnval){ [ 590.723933] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d0daae-f321-991c-d97d-f212284fcf9d" [ 590.723933] env[63538]: _type = "Task" [ 590.723933] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.724652] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100369, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.724834] env[63538]: DEBUG nova.compute.manager [-] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 590.724931] env[63538]: DEBUG nova.network.neutron [-] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 590.735025] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 590.735025] env[63538]: value = "task-5100371" [ 590.735025] env[63538]: _type = "Task" [ 590.735025] env[63538]: } to complete. 
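Just above, the manager wraps _deallocate_network_with_retries in an oslo.service looping call so that a transient Neutron failure while destroying instance 15a8424e-... does not leak ports. A stand-alone sketch of that retry idea using only the standard library; deallocate_for_instance here is a stub, not the real Neutron call, and the actual backoff policy in the manager may differ:

    import time

    class NetworkDeallocationError(Exception):
        pass

    _attempts = {"count": 0}

    def deallocate_for_instance(instance_uuid):
        """Stub for the Neutron call; fails twice to exercise the retry path."""
        _attempts["count"] += 1
        if _attempts["count"] < 3:
            raise NetworkDeallocationError("neutron temporarily unavailable")
        print(f"ports for {instance_uuid} deallocated")

    def deallocate_network_with_retries(instance_uuid, max_attempts=5, interval=1.0):
        for attempt in range(1, max_attempts + 1):
            try:
                deallocate_for_instance(instance_uuid)
                return
            except NetworkDeallocationError as exc:
                if attempt == max_attempts:
                    raise
                print(f"attempt {attempt} failed ({exc}); retrying in {interval}s")
                time.sleep(interval)

    deallocate_network_with_retries("15a8424e-27a6-4b77-b57c-d163345b8fed")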
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.742698] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d0daae-f321-991c-d97d-f212284fcf9d, 'name': SearchDatastore_Task, 'duration_secs': 0.016092} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.747242] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100371, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.747491] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f1ceec2-411b-4541-8007-53950a3b0d71 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.755160] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Waiting for the task: (returnval){ [ 590.755160] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5231715f-8009-8d00-94d0-e6b9bea089ab" [ 590.755160] env[63538]: _type = "Task" [ 590.755160] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.765958] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5231715f-8009-8d00-94d0-e6b9bea089ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.908124] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Acquiring lock "543875b5-195a-476d-a0b4-3211ceefa27f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.908124] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Lock "543875b5-195a-476d-a0b4-3211ceefa27f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.081223] env[63538]: DEBUG nova.compute.utils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 591.086225] env[63538]: DEBUG nova.network.neutron [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 591.091975] env[63538]: DEBUG nova.compute.manager [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 591.092695] env[63538]: DEBUG nova.network.neutron [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 591.148124] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100368, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08448} completed successfully. 
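The lockutils messages above show each build serialized on a lock named after the instance UUID (the inner _locked_do_build_and_run_instance), with the waited and held times logged at DEBUG. A small standard-library imitation of that per-instance locking pattern; the real code gets both the locking and the timing log lines from oslo.concurrency's lockutils rather than anything hand-rolled like this:

    import collections
    import threading
    import time

    # One lock per instance UUID, mirroring the per-instance serialization above.
    _instance_locks = collections.defaultdict(threading.Lock)

    def locked_do_build_and_run_instance(instance_uuid, build):
        lock = _instance_locks[instance_uuid]
        t0 = time.monotonic()
        with lock:
            print(f'Lock "{instance_uuid}" acquired :: waited {time.monotonic() - t0:.3f}s')
            t1 = time.monotonic()
            try:
                build()
            finally:
                print(f'Lock "{instance_uuid}" released :: held {time.monotonic() - t1:.3f}s')

    locked_do_build_and_run_instance("543875b5-195a-476d-a0b4-3211ceefa27f",
                                     lambda: time.sleep(0.1))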
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.151545] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 591.152563] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b69729b-9406-47fd-9456-5d4e6de9b965 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.179916] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf/c8a02fa6-5232-4dde-b6dd-0da1089b6bbf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 591.180143] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e9d115f-f508-40b7-a020-46ff1fa2c695 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.208248] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100369, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.210692] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 591.210692] env[63538]: value = "task-5100372" [ 591.210692] env[63538]: _type = "Task" [ 591.210692] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.221382] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100372, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.244888] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100371, 'name': CreateVM_Task, 'duration_secs': 0.48128} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.245101] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 591.247602] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.247602] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.247602] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 591.247602] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab75c31d-103a-4262-a12c-b9692a643a1f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.253094] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 591.253094] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c4c9e-4ddc-2edc-3f24-d6fdb40d0d53" [ 591.253094] env[63538]: _type = "Task" [ 591.253094] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.265957] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c4c9e-4ddc-2edc-3f24-d6fdb40d0d53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.269937] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5231715f-8009-8d00-94d0-e6b9bea089ab, 'name': SearchDatastore_Task, 'duration_secs': 0.017538} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.270366] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.270966] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 0a7c34e0-1acc-4761-804a-eb9ee00fdd77/0a7c34e0-1acc-4761-804a-eb9ee00fdd77.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 591.270966] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ca1e998-3715-4041-a261-5145b33b0e9a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.278762] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Waiting for the task: (returnval){ [ 591.278762] env[63538]: value = "task-5100373" [ 591.278762] env[63538]: _type = "Task" [ 591.278762] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.288071] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100373, 'name': CopyVirtualDisk_Task} progress is 0%. 
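The sequence above is the image-cache fast path for spawning: take the lock for the cached VMDK under devstack-image-cache_base, confirm with a SearchDatastore_Task that it exists, copy it into the new instance's directory with CopyVirtualDisk_Task, and later extend the copy to the flavor's root size (the ExtendVirtualDisk_Task entries). A sketch of the same flow with a local directory standing in for the datastore; ensure_cached_image and clone_root_disk are illustrative helpers, not nova functions:

    import shutil
    import threading
    from pathlib import Path

    _image_cache_lock = threading.Lock()   # stands in for the per-image lock above

    def ensure_cached_image(cache_dir: Path, image_id: str, fetch) -> Path:
        """Return the cached VMDK, downloading it only if the cache misses."""
        vmdk = cache_dir / image_id / f"{image_id}.vmdk"
        with _image_cache_lock:
            if not vmdk.exists():                    # SearchDatastore_Task equivalent
                vmdk.parent.mkdir(parents=True, exist_ok=True)
                fetch(vmdk)
        return vmdk

    def clone_root_disk(cached_vmdk: Path, instance_dir: Path, root_gb: int) -> Path:
        """Copy the cached image into the instance directory and grow it."""
        instance_dir.mkdir(parents=True, exist_ok=True)
        root = instance_dir / f"{instance_dir.name}.vmdk"
        shutil.copyfile(cached_vmdk, root)           # CopyVirtualDisk_Task equivalent
        target = root_gb * 1024 ** 3
        if root.stat().st_size < target:             # ExtendVirtualDisk_Task equivalent
            with root.open("r+b") as f:
                f.truncate(target)
        return root

    base = Path("/tmp/datastore2")
    cached = ensure_cached_image(base / "devstack-image-cache_base",
                                 "faabbca4-e27b-433a-b93d-f059fd73bc92",
                                 fetch=lambda p: p.write_bytes(b"\0" * 1024))
    print(clone_root_disk(cached, base / "0a7c34e0-1acc-4761-804a-eb9ee00fdd77", root_gb=1))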
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.305774] env[63538]: DEBUG nova.policy [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '206acdac4dc449c4883937aa04183dac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cc062eb2325468eaa723c9da2117df8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 591.498786] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "ede967c0-ec3a-4f26-8290-0ee36890cd75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.498786] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "ede967c0-ec3a-4f26-8290-0ee36890cd75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.598568] env[63538]: DEBUG nova.compute.manager [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Start building block device mappings for instance. 
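The nova.policy line above records why the port for instance 10d69b79-... is not bound to an external network: the requesting token only carries the member and reader roles, and network:attach_external_network is, by default, an admin-only rule, so the check fails and the build simply continues without that capability. A toy version of that role check; check_attach_external_network is illustrative, since the real evaluation goes through oslo.policy rules rather than a hard-coded role list:

    def check_attach_external_network(creds):
        """Simplified stand-in for the admin-only default policy rule."""
        return "admin" in creds.get("roles", [])

    creds = {"roles": ["member", "reader"],
             "project_id": "7cc062eb2325468eaa723c9da2117df8",
             "is_admin": False}
    print(check_attach_external_network(creds))   # False, matching the failed check above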
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 591.665759] env[63538]: DEBUG nova.network.neutron [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Updating instance_info_cache with network_info: [{"id": "5b823d56-3162-4875-a202-4526e8d9e433", "address": "fa:16:3e:11:db:ad", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b823d56-31", "ovs_interfaceid": "5b823d56-3162-4875-a202-4526e8d9e433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.709734] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100369, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.737684} completed successfully. 
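The instance_info_cache update above stores the full VIF description that later feeds both the Neutron port binding and the vmxnet3 NIC the driver builds (compare the "Instance VIF info" entries). A trimmed, illustrative parse of that cached structure, keeping only fields shown in the log; the real object is nova's NetworkInfo model rather than raw JSON:

    import json

    # Subset of the network_info cached above for instance 8fb62f47-....
    network_info = json.loads("""
    [{"id": "5b823d56-3162-4875-a202-4526e8d9e433",
      "address": "fa:16:3e:11:db:ad",
      "type": "ovs",
      "devname": "tap5b823d56-31",
      "ovs_interfaceid": "5b823d56-3162-4875-a202-4526e8d9e433",
      "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148",
                  "bridge": "br-int",
                  "meta": {"mtu": 8950},
                  "subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.6", "version": 4}]}]}}]
    """)

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        print(vif["id"], vif["address"], fixed_ips,
              "mtu", vif["network"]["meta"]["mtu"], "dev", vif["devname"])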
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.713046] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] e3ba860b-afb8-4843-9d99-049dce205f9f/e3ba860b-afb8-4843-9d99-049dce205f9f.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 591.713293] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 591.718358] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-61df99d2-fa7a-4d1a-950c-296a72921614 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.738943] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Waiting for the task: (returnval){ [ 591.738943] env[63538]: value = "task-5100374" [ 591.738943] env[63538]: _type = "Task" [ 591.738943] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.739865] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100372, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.781721] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c4c9e-4ddc-2edc-3f24-d6fdb40d0d53, 'name': SearchDatastore_Task, 'duration_secs': 0.06303} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.785501] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.786327] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 591.787183] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.787451] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.790638] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 591.790638] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d218ac98-998c-4759-adc5-baf0091691bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.803215] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100373, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.809907] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 591.810243] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 591.811322] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-260d840d-5e2f-457d-8159-bde87aaa99fa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.820059] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 591.820059] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521f45e9-6ee3-f134-6d88-65d7502af776" [ 591.820059] env[63538]: _type = "Task" [ 591.820059] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.835655] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521f45e9-6ee3-f134-6d88-65d7502af776, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.949984] env[63538]: DEBUG nova.compute.manager [None req-b6c6aa97-b3e6-4a25-bbc0-b794fbb996ff tempest-ServerDiagnosticsTest-791129369 tempest-ServerDiagnosticsTest-791129369-project-admin] [instance: e1710498-0616-4862-afc0-6e452dc19882] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 591.956442] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c921a4-e4dd-4f7d-bbe1-df3ad6d34ae2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.966629] env[63538]: INFO nova.compute.manager [None req-b6c6aa97-b3e6-4a25-bbc0-b794fbb996ff tempest-ServerDiagnosticsTest-791129369 tempest-ServerDiagnosticsTest-791129369-project-admin] [instance: e1710498-0616-4862-afc0-6e452dc19882] Retrieving diagnostics [ 591.969448] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f190a9-1e54-48f7-afce-750dc5be4673 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.075551] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2296a367-92aa-49c9-a3ad-1f2ef6bd36ab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.084049] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6335f7f6-be1e-4081-9009-df6285ecd665 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.129345] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436c53d8-e355-40bb-ba8a-7bdecf84e76d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.141395] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da166b34-faf5-424d-bf30-611cf057af2f {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.158601] env[63538]: DEBUG nova.compute.provider_tree [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.168355] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "refresh_cache-8fb62f47-cbf2-4b46-bc33-845e832f9ef0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.173024] env[63538]: DEBUG nova.compute.manager [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Instance network_info: |[{"id": "5b823d56-3162-4875-a202-4526e8d9e433", "address": "fa:16:3e:11:db:ad", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b823d56-31", "ovs_interfaceid": "5b823d56-3162-4875-a202-4526e8d9e433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 592.173307] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:db:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6edb8eae-1113-49d0-84f7-9fd9f82b26fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b823d56-3162-4875-a202-4526e8d9e433', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 592.178285] env[63538]: DEBUG oslo.service.loopingcall [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 592.179395] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 592.179636] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f60664b8-a2ee-46e6-baf5-840fc68b1900 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.198896] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Acquiring lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.199160] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.199365] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Acquiring lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.199547] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.199707] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.203701] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 592.203701] env[63538]: value = "task-5100375" [ 592.203701] env[63538]: _type = "Task" [ 592.203701] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.204212] env[63538]: INFO nova.compute.manager [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Terminating instance [ 592.211173] env[63538]: DEBUG nova.compute.manager [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 592.211380] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 592.212999] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d390e6-49af-4cd0-9431-6e97d4613853 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.225021] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100375, 'name': CreateVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.227837] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 592.228133] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51f1d929-1e98-4a82-9268-1abfaf180ec5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.233232] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100372, 'name': ReconfigVM_Task, 'duration_secs': 0.977568} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.234209] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Reconfigured VM instance instance-00000006 to attach disk [datastore2] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf/c8a02fa6-5232-4dde-b6dd-0da1089b6bbf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 592.235164] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2dcc61a5-ff08-4d07-91c8-f7fec8894083 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.243335] env[63538]: DEBUG oslo_vmware.api [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Waiting for the task: (returnval){ [ 592.243335] env[63538]: value = "task-5100376" [ 592.243335] env[63538]: _type = "Task" [ 592.243335] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.246275] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 592.246275] env[63538]: value = "task-5100377" [ 592.246275] env[63538]: _type = "Task" [ 592.246275] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.280044] env[63538]: DEBUG oslo_vmware.api [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Task: {'id': task-5100376, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.280044] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100377, 'name': Rename_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.280044] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100374, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.37448} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.280044] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 592.280044] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c00ee7-069b-425a-b511-f43b6769da58 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.307369] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] e3ba860b-afb8-4843-9d99-049dce205f9f/e3ba860b-afb8-4843-9d99-049dce205f9f.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 592.311549] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5340dfc6-71fd-43b7-84ff-c35779bbc774 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.326724] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100373, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.798} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.327129] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 0a7c34e0-1acc-4761-804a-eb9ee00fdd77/0a7c34e0-1acc-4761-804a-eb9ee00fdd77.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 592.327455] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 592.333733] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2438f79f-f165-413a-822f-977e1edcce02 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.339629] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Waiting for the task: (returnval){ [ 592.339629] env[63538]: value = "task-5100378" [ 592.339629] env[63538]: _type = "Task" [ 592.339629] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.347097] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521f45e9-6ee3-f134-6d88-65d7502af776, 'name': SearchDatastore_Task, 'duration_secs': 0.06946} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.351688] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Waiting for the task: (returnval){ [ 592.351688] env[63538]: value = "task-5100379" [ 592.351688] env[63538]: _type = "Task" [ 592.351688] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.351688] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1252a9e2-5d38-486b-991b-d143086e8d77 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.363415] env[63538]: DEBUG nova.network.neutron [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Successfully updated port: a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 592.365529] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100378, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.369206] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 592.369206] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5285139c-a329-1c2c-ad4a-bd2891d49c04" [ 592.369206] env[63538]: _type = "Task" [ 592.369206] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.376076] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100379, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.382719] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5285139c-a329-1c2c-ad4a-bd2891d49c04, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.635044] env[63538]: DEBUG nova.compute.manager [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 592.663604] env[63538]: DEBUG nova.scheduler.client.report [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 592.683955] env[63538]: DEBUG nova.virt.hardware [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 592.684487] env[63538]: DEBUG nova.virt.hardware [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 592.684487] env[63538]: DEBUG nova.virt.hardware [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.684890] env[63538]: DEBUG nova.virt.hardware [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 592.684890] env[63538]: DEBUG nova.virt.hardware [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Image 
pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 592.684890] env[63538]: DEBUG nova.virt.hardware [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 592.685096] env[63538]: DEBUG nova.virt.hardware [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 592.685207] env[63538]: DEBUG nova.virt.hardware [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 592.685374] env[63538]: DEBUG nova.virt.hardware [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 592.685538] env[63538]: DEBUG nova.virt.hardware [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 592.685711] env[63538]: DEBUG nova.virt.hardware [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 592.686701] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6da79b9-6323-4878-aa4b-9e2571e68aa8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.698579] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffe0aea-537a-4a4b-ab59-a1e9fe95917c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.725633] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100375, 'name': CreateVM_Task, 'duration_secs': 0.506888} completed successfully. 
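The nova.virt.hardware lines above walk through CPU topology selection for the m1.nano flavor: neither the flavor nor the image sets limits (0:0:0), so the 65536 sockets/cores/threads defaults apply, and for a single vCPU the only factorisation is 1:1:1. A simplified enumeration of that step; the real code also weighs preferred topologies and NUMA constraints, which this sketch ignores:

    from itertools import product

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                                max_threads=65536):
        """List (sockets, cores, threads) triples whose product equals vcpus."""
        def axis(limit):
            return range(1, min(vcpus, limit) + 1)
        return [(s, c, t)
                for s, c, t in product(axis(max_sockets), axis(max_cores), axis(max_threads))
                if s * c * t == vcpus]

    print(possible_cpu_topologies(1))   # [(1, 1, 1)] -- the single topology the log reports
    print(possible_cpu_topologies(4))   # (1, 1, 4), (1, 2, 2), ..., (4, 1, 1)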
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.725633] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 592.726215] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.726328] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.730023] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 592.730023] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf80520a-7b98-4cba-8bff-3c3e47e5e2e2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.737499] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 592.737499] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522d929f-fb41-2021-ea42-ce976e0f9aae" [ 592.737499] env[63538]: _type = "Task" [ 592.737499] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.749420] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522d929f-fb41-2021-ea42-ce976e0f9aae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.759117] env[63538]: DEBUG oslo_vmware.api [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Task: {'id': task-5100376, 'name': PowerOffVM_Task, 'duration_secs': 0.365527} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.763809] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 592.763929] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 592.766355] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3631b112-61e2-4034-89b0-7a6b049b3e05 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.776125] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100377, 'name': Rename_Task, 'duration_secs': 0.248696} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.776125] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 592.776125] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cbe41b24-a9f0-42da-aeb8-67546438213b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.785869] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 592.785869] env[63538]: value = "task-5100381" [ 592.785869] env[63538]: _type = "Task" [ 592.785869] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.799472] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100381, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.845373] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 592.845692] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 592.845890] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Deleting the datastore file [datastore1] 4b8fb9ad-a366-423d-81b1-04c5e4ec9264 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 592.852826] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ab0179e-290f-4543-a7d9-27bdeb980012 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.875672] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.875815] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.875972] env[63538]: DEBUG nova.network.neutron [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 592.881022] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100378, 'name': ReconfigVM_Task, 'duration_secs': 0.456397} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.884507] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Reconfigured VM instance instance-00000008 to attach disk [datastore2] e3ba860b-afb8-4843-9d99-049dce205f9f/e3ba860b-afb8-4843-9d99-049dce205f9f.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 592.887027] env[63538]: DEBUG oslo_vmware.api [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Waiting for the task: (returnval){ [ 592.887027] env[63538]: value = "task-5100382" [ 592.887027] env[63538]: _type = "Task" [ 592.887027] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.890614] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5da21d8e-6b18-41b1-a9a5-24533604ce66 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.893876] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100379, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.15358} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.904962] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 592.910257] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca8767b-352d-4860-adc2-99ad78102035 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.913926] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5285139c-a329-1c2c-ad4a-bd2891d49c04, 'name': SearchDatastore_Task, 'duration_secs': 0.023325} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.919640] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.920324] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 2e1b0bc7-3909-48e2-b9be-26822a57ee67/2e1b0bc7-3909-48e2-b9be-26822a57ee67.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 592.920493] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Waiting for the task: (returnval){ [ 592.920493] env[63538]: value = "task-5100383" [ 592.920493] env[63538]: _type = "Task" [ 592.920493] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.922557] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6511f53-4cc5-4fc6-a699-45d2c56c2716 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.939931] env[63538]: DEBUG nova.network.neutron [-] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.954842] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 0a7c34e0-1acc-4761-804a-eb9ee00fdd77/0a7c34e0-1acc-4761-804a-eb9ee00fdd77.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 592.955188] env[63538]: DEBUG oslo_vmware.api [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Task: {'id': task-5100382, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.956518] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17b00e0a-bb71-46f5-b574-1894b9ec3fe1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.982043] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 592.982043] env[63538]: value = "task-5100384" [ 592.982043] env[63538]: _type = "Task" [ 592.982043] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.992454] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100383, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.992990] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Waiting for the task: (returnval){ [ 592.992990] env[63538]: value = "task-5100385" [ 592.992990] env[63538]: _type = "Task" [ 592.992990] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.000577] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100384, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.006630] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100385, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.177664] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.182022] env[63538]: DEBUG nova.compute.manager [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 593.182022] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 8.758s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.256859] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522d929f-fb41-2021-ea42-ce976e0f9aae, 'name': SearchDatastore_Task, 'duration_secs': 0.019564} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.258029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.258309] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 593.258639] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.258822] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.259054] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 593.259711] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c061e443-db8e-4689-9675-99a750202c18 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.262900] env[63538]: DEBUG nova.network.neutron [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Successfully created port: 589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
593.272018] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 593.272414] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 593.272968] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a710609-906a-46ee-8ebc-d4f3f323ef34 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.279713] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 593.279713] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e82039-74f1-b93a-305e-2584a9228812" [ 593.279713] env[63538]: _type = "Task" [ 593.279713] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.294311] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e82039-74f1-b93a-305e-2584a9228812, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.305704] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100381, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.411821] env[63538]: DEBUG oslo_vmware.api [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Task: {'id': task-5100382, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.439525} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.411821] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 593.411821] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 593.411821] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 593.411821] env[63538]: INFO nova.compute.manager [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Took 1.20 seconds to destroy the instance on the hypervisor. [ 593.412445] env[63538]: DEBUG oslo.service.loopingcall [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 593.412445] env[63538]: DEBUG nova.compute.manager [-] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 593.412445] env[63538]: DEBUG nova.network.neutron [-] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 593.439022] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100383, 'name': Rename_Task, 'duration_secs': 0.200023} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.439718] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 593.439718] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4a5c75c-bbd2-4921-92e9-159688bb18d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.448217] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Waiting for the task: (returnval){ [ 593.448217] env[63538]: value = "task-5100386" [ 593.448217] env[63538]: _type = "Task" [ 593.448217] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.458948] env[63538]: INFO nova.compute.manager [-] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Took 2.73 seconds to deallocate network for instance. [ 593.459283] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100386, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.493329] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100384, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.509083] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100385, 'name': ReconfigVM_Task, 'duration_secs': 0.365454} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.509231] env[63538]: DEBUG nova.network.neutron [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.511507] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 0a7c34e0-1acc-4761-804a-eb9ee00fdd77/0a7c34e0-1acc-4761-804a-eb9ee00fdd77.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 593.515086] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3db1ed6d-3644-4582-9665-87d8a21715b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.525888] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Waiting for the task: (returnval){ [ 593.525888] env[63538]: value = "task-5100387" [ 593.525888] env[63538]: _type = "Task" [ 593.525888] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.537875] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100387, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.689024] env[63538]: DEBUG nova.compute.utils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 593.697519] env[63538]: DEBUG nova.compute.manager [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 593.698042] env[63538]: DEBUG nova.network.neutron [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 593.798192] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e82039-74f1-b93a-305e-2584a9228812, 'name': SearchDatastore_Task, 'duration_secs': 0.028522} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.799886] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e62528dd-0c4f-4804-9587-23974c0720c2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.806734] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100381, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.813128] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 593.813128] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cb02ea-5fb2-a6ad-462a-2a59c94641de" [ 593.813128] env[63538]: _type = "Task" [ 593.813128] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.822944] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cb02ea-5fb2-a6ad-462a-2a59c94641de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.824711] env[63538]: DEBUG nova.policy [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9873cd990aba452b8b9ef7a8b67f7f53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd7a98c2190944e4284f2c4f02cee8ca2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 593.965910] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100386, 'name': PowerOnVM_Task} progress is 90%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.971861] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.004298] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100384, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.758761} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.004298] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 2e1b0bc7-3909-48e2-b9be-26822a57ee67/2e1b0bc7-3909-48e2-b9be-26822a57ee67.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 594.004298] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 594.004298] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd911d80-85df-4e51-9189-0a96424ef42d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.012328] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 594.012328] env[63538]: value = "task-5100388" [ 594.012328] env[63538]: _type = "Task" [ 594.012328] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.026667] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100388, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.039676] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100387, 'name': Rename_Task, 'duration_secs': 0.229909} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.040157] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 594.041668] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-828794a9-3ff5-44f8-bf98-ca3accf2c5ec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.050371] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Waiting for the task: (returnval){ [ 594.050371] env[63538]: value = "task-5100389" [ 594.050371] env[63538]: _type = "Task" [ 594.050371] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.059852] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100389, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.199766] env[63538]: DEBUG nova.compute.manager [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 594.242241] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 174368d1-9910-495b-a923-842e0440fd01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 594.242241] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 15a8424e-27a6-4b77-b57c-d163345b8fed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 594.242241] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 4b8fb9ad-a366-423d-81b1-04c5e4ec9264 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 594.242241] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance e1710498-0616-4862-afc0-6e452dc19882 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 594.242375] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance d99b7b8e-633f-4fba-bce6-9b8e9e9892d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 594.242375] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance c8a02fa6-5232-4dde-b6dd-0da1089b6bbf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 594.243708] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 0a7c34e0-1acc-4761-804a-eb9ee00fdd77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 594.244112] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance e3ba860b-afb8-4843-9d99-049dce205f9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 594.244575] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 2e1b0bc7-3909-48e2-b9be-26822a57ee67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 594.244575] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 8fb62f47-cbf2-4b46-bc33-845e832f9ef0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 594.245381] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance f9fa5578-acf3-416f-9cb0-8ceb00e5132d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 594.245381] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 594.245381] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 36d40b69-fae7-4867-afa1-4befdc96bde0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 594.312286] env[63538]: DEBUG oslo_vmware.api [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100381, 'name': PowerOnVM_Task, 'duration_secs': 1.152471} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.312286] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 594.312286] env[63538]: INFO nova.compute.manager [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Took 17.53 seconds to spawn the instance on the hypervisor. [ 594.312286] env[63538]: DEBUG nova.compute.manager [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 594.312286] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9e321c-ca66-46ea-9575-2c5b4ca35984 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.337957] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cb02ea-5fb2-a6ad-462a-2a59c94641de, 'name': SearchDatastore_Task, 'duration_secs': 0.025341} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.339880] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.339880] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 8fb62f47-cbf2-4b46-bc33-845e832f9ef0/8fb62f47-cbf2-4b46-bc33-845e832f9ef0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 594.344838] env[63538]: DEBUG nova.compute.manager [req-e1b753da-6b79-415a-9348-951227722b24 req-de504023-2292-4705-bd8b-e9c7244fac04 service nova] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Received event network-changed-5e9b18aa-a332-400c-9c74-aed76633d8b5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 594.344838] env[63538]: DEBUG nova.compute.manager [req-e1b753da-6b79-415a-9348-951227722b24 req-de504023-2292-4705-bd8b-e9c7244fac04 service nova] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Refreshing instance network info cache due to event network-changed-5e9b18aa-a332-400c-9c74-aed76633d8b5. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 594.344838] env[63538]: DEBUG oslo_concurrency.lockutils [req-e1b753da-6b79-415a-9348-951227722b24 req-de504023-2292-4705-bd8b-e9c7244fac04 service nova] Acquiring lock "refresh_cache-0a7c34e0-1acc-4761-804a-eb9ee00fdd77" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.344838] env[63538]: DEBUG oslo_concurrency.lockutils [req-e1b753da-6b79-415a-9348-951227722b24 req-de504023-2292-4705-bd8b-e9c7244fac04 service nova] Acquired lock "refresh_cache-0a7c34e0-1acc-4761-804a-eb9ee00fdd77" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.344838] env[63538]: DEBUG nova.network.neutron [req-e1b753da-6b79-415a-9348-951227722b24 req-de504023-2292-4705-bd8b-e9c7244fac04 service nova] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Refreshing network info cache for port 5e9b18aa-a332-400c-9c74-aed76633d8b5 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 594.348142] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8cc318ef-1d5f-48bb-af51-38ccc7fa0b9a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.358873] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 594.358873] env[63538]: value = "task-5100390" [ 594.358873] env[63538]: _type = "Task" [ 594.358873] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.374992] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100390, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.395507] env[63538]: DEBUG nova.network.neutron [-] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.418517] env[63538]: DEBUG nova.network.neutron [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Successfully created port: ac528a58-339f-4621-890a-afe5e3ee634d {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 594.427860] env[63538]: DEBUG nova.network.neutron [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Updating instance_info_cache with network_info: [{"id": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "address": "fa:16:3e:a9:56:1a", "network": {"id": "4fd42fb1-fcc5-467d-88b7-1a5ab3297631", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-9303923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c5e6ed681ed4078bd9115b30f419d9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5dfe48b-4a", "ovs_interfaceid": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.469112] env[63538]: DEBUG oslo_vmware.api [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100386, 'name': PowerOnVM_Task, 'duration_secs': 0.574405} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.470295] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 594.470702] env[63538]: INFO nova.compute.manager [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Took 12.58 seconds to spawn the instance on the hypervisor. 
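
Every *_Task sequence in this excerpt follows the same shape: oslo_vmware.service logs "Invoking <Object>.<Method>_Task", the caller logs "Waiting for the task ... to complete" (api.py:397), _poll_task reports "progress is N%" (api.py:434), and the task finally appears as "completed successfully" with a duration (api.py:444). The sketch below illustrates that invoke-and-poll pattern, assuming oslo.vmware's public VMwareAPISession / invoke_api / wait_for_task interface; the vCenter address, credentials, poll settings and the vm-12345 managed object ID are placeholders, not values taken from this log.

    # Minimal sketch of the invoke-and-poll pattern behind the *_Task lines.
    # All connection details and the vm_ref value are illustrative only.
    from oslo_vmware import api, vim_util

    # Constructing the session authenticates against the given vCenter.
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed object reference from a (placeholder) VM moref ID.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # "Invoking VirtualMachine.PowerOnVM_Task ...": the SOAP call returns
    # a Task reference immediately.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # "Waiting for the task ... to complete": wait_for_task polls the task
    # (the "progress is N%" lines) until it reports success or raises on error.
    session.wait_for_task(task)
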
[ 594.478025] env[63538]: DEBUG nova.compute.manager [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 594.478025] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6acbc4fc-fe22-43c7-aa45-3f14481e4191 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.528463] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100388, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.268062} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.528660] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 594.529542] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f28b858-5a4d-4df4-927e-72352719d356 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.564382] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 2e1b0bc7-3909-48e2-b9be-26822a57ee67/2e1b0bc7-3909-48e2-b9be-26822a57ee67.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 594.564382] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c54a5c1d-176d-4976-9285-71c407ac1d2e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.599391] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 594.599391] env[63538]: value = "task-5100391" [ 594.599391] env[63538]: _type = "Task" [ 594.599391] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.603164] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100389, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.614228] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100391, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.616385] env[63538]: DEBUG nova.compute.manager [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Received event network-vif-plugged-47d19b83-6292-46e2-835f-1198ef52374c {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 594.616591] env[63538]: DEBUG oslo_concurrency.lockutils [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] Acquiring lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.616791] env[63538]: DEBUG oslo_concurrency.lockutils [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] Lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.617385] env[63538]: DEBUG oslo_concurrency.lockutils [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] Lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.617385] env[63538]: DEBUG nova.compute.manager [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] No waiting events found dispatching network-vif-plugged-47d19b83-6292-46e2-835f-1198ef52374c {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 594.617385] env[63538]: WARNING nova.compute.manager [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Received unexpected event network-vif-plugged-47d19b83-6292-46e2-835f-1198ef52374c for instance with vm_state building and task_state spawning. [ 594.617592] env[63538]: DEBUG nova.compute.manager [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Received event network-changed-47d19b83-6292-46e2-835f-1198ef52374c {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 594.617671] env[63538]: DEBUG nova.compute.manager [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Refreshing instance network info cache due to event network-changed-47d19b83-6292-46e2-835f-1198ef52374c. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 594.617855] env[63538]: DEBUG oslo_concurrency.lockutils [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] Acquiring lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.617981] env[63538]: DEBUG oslo_concurrency.lockutils [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] Acquired lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.618149] env[63538]: DEBUG nova.network.neutron [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Refreshing network info cache for port 47d19b83-6292-46e2-835f-1198ef52374c {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 594.645269] env[63538]: DEBUG nova.compute.manager [req-b3af1df0-6a6d-4b9e-970f-9c586144023b req-0c14fb3f-6d67-4f51-ae40-76bbe12a570c service nova] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Received event network-vif-plugged-5b823d56-3162-4875-a202-4526e8d9e433 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 594.647770] env[63538]: DEBUG oslo_concurrency.lockutils [req-b3af1df0-6a6d-4b9e-970f-9c586144023b req-0c14fb3f-6d67-4f51-ae40-76bbe12a570c service nova] Acquiring lock "8fb62f47-cbf2-4b46-bc33-845e832f9ef0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.647770] env[63538]: DEBUG oslo_concurrency.lockutils [req-b3af1df0-6a6d-4b9e-970f-9c586144023b req-0c14fb3f-6d67-4f51-ae40-76bbe12a570c service nova] Lock "8fb62f47-cbf2-4b46-bc33-845e832f9ef0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.647770] env[63538]: DEBUG oslo_concurrency.lockutils [req-b3af1df0-6a6d-4b9e-970f-9c586144023b req-0c14fb3f-6d67-4f51-ae40-76bbe12a570c service nova] Lock "8fb62f47-cbf2-4b46-bc33-845e832f9ef0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.647770] env[63538]: DEBUG nova.compute.manager [req-b3af1df0-6a6d-4b9e-970f-9c586144023b req-0c14fb3f-6d67-4f51-ae40-76bbe12a570c service nova] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] No waiting events found dispatching network-vif-plugged-5b823d56-3162-4875-a202-4526e8d9e433 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 594.647770] env[63538]: WARNING nova.compute.manager [req-b3af1df0-6a6d-4b9e-970f-9c586144023b req-0c14fb3f-6d67-4f51-ae40-76bbe12a570c service nova] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Received unexpected event network-vif-plugged-5b823d56-3162-4875-a202-4526e8d9e433 for instance with vm_state building and task_state spawning. 
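The "Acquiring lock ... / acquired ... waited / released ... held" lines above are emitted by oslo.concurrency's lock helpers. A short sketch of the two forms that produce that logging, with placeholder lock names rather than the instance locks in this log:

    # Sketch of the oslo.concurrency patterns behind the acquire/release
    # DEBUG lines; the lock names are placeholders.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('example-instance-events')
    def pop_event():
        # Runs between the "acquired by ... waited" and "released by ... held"
        # lines (lockutils.py:407/421 in the log above).
        return 'event'

    # The bare context manager logs through lockutils.py:310/313/331 instead.
    with lockutils.lock('refresh_cache-example'):
        pass  # e.g. refresh a cached value while serialized

    pop_event()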
[ 594.748964] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance bf54098e-91a8-403f-a6fe-b58a62daaadb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 594.853801] env[63538]: INFO nova.compute.manager [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Took 30.72 seconds to build instance. [ 594.874297] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100390, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.901274] env[63538]: INFO nova.compute.manager [-] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Took 1.49 seconds to deallocate network for instance. [ 594.935629] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Releasing lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.935629] env[63538]: DEBUG nova.compute.manager [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Instance network_info: |[{"id": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "address": "fa:16:3e:a9:56:1a", "network": {"id": "4fd42fb1-fcc5-467d-88b7-1a5ab3297631", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-9303923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c5e6ed681ed4078bd9115b30f419d9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5dfe48b-4a", "ovs_interfaceid": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 594.935837] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:56:1a', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b399c74-1411-408a-b4cd-84e268ae83fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a5dfe48b-4acc-472c-8e00-f936b4068ea5', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 594.945548] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Creating folder: Project (6c5e6ed681ed4078bd9115b30f419d9a). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 594.948857] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b178e3e2-df77-4d36-b014-17067f871274 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.969682] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Created folder: Project (6c5e6ed681ed4078bd9115b30f419d9a) in parent group-v992234. [ 594.969682] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Creating folder: Instances. Parent ref: group-v992263. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 594.971514] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a39265b-b417-4727-87d6-879067fdb6b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.986196] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Created folder: Instances in parent group-v992263. [ 594.986632] env[63538]: DEBUG oslo.service.loopingcall [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.986753] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 594.986971] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27b90896-1931-4ea9-a3b4-90468927e32f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.021226] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 595.021226] env[63538]: value = "task-5100394" [ 595.021226] env[63538]: _type = "Task" [ 595.021226] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.021226] env[63538]: INFO nova.compute.manager [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Took 29.41 seconds to build instance. 
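The Folder.CreateFolder and Folder.CreateVM_Task invocations above can be reproduced with raw vSphere calls through oslo.vmware. This is an illustrative sketch only, not the nova vm_util code path; session, parent_folder, respool_ref and datastore_name are assumed to exist already.

    # Illustrative raw vSphere calls mirroring the folder creation and
    # CreateVM_Task requests logged above. `session`, `parent_folder`,
    # `respool_ref` and `datastore_name` are assumed to be set up elsewhere.
    factory = session.vim.client.factory

    # CreateFolder is synchronous and returns the new folder moref.
    project_folder = session.invoke_api(
        session.vim, 'CreateFolder', parent_folder, name='Project (example)')
    instances_folder = session.invoke_api(
        session.vim, 'CreateFolder', project_folder, name='Instances')

    # Minimal config spec: a name, guest type and a home directory on the
    # datastore; real specs also carry CPU/memory sizing and the VIF devices
    # listed in the "Instance VIF info" record above.
    config = factory.create('ns0:VirtualMachineConfigSpec')
    config.name = 'example-instance-uuid'
    config.guestId = 'otherGuest'
    config.files = factory.create('ns0:VirtualMachineFileInfo')
    config.files.vmPathName = '[%s]' % datastore_name

    task = session.invoke_api(session.vim, 'CreateVM_Task', instances_folder,
                              config=config, pool=respool_ref)
    session.wait_for_task(task)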
[ 595.039463] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100394, 'name': CreateVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.077466] env[63538]: DEBUG oslo_vmware.api [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100389, 'name': PowerOnVM_Task, 'duration_secs': 0.575781} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.078249] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 595.078249] env[63538]: INFO nova.compute.manager [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Took 15.68 seconds to spawn the instance on the hypervisor. [ 595.078249] env[63538]: DEBUG nova.compute.manager [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 595.079299] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed5e236-fb30-4bd5-b94d-6306c3732cff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.116410] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100391, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.215270] env[63538]: DEBUG nova.compute.manager [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 595.251586] env[63538]: DEBUG nova.virt.hardware [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 595.251852] env[63538]: DEBUG nova.virt.hardware [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 595.252084] env[63538]: DEBUG nova.virt.hardware [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 595.252257] env[63538]: DEBUG nova.virt.hardware [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 595.252409] env[63538]: DEBUG nova.virt.hardware [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 595.252556] env[63538]: DEBUG nova.virt.hardware [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 595.252766] env[63538]: DEBUG nova.virt.hardware [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 595.252930] env[63538]: DEBUG nova.virt.hardware [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 595.253268] env[63538]: DEBUG nova.virt.hardware [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 595.253439] env[63538]: DEBUG nova.virt.hardware [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 595.253620] env[63538]: DEBUG nova.virt.hardware [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 595.254535] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8683b464-8f8b-4acf-83be-8138c71a72dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.261133] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 595.270657] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b3d019a-3ab0-4d7a-aebf-667790dd7be5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.357497] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bbd2bb6b-4155-45af-8874-061d84ac2473 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.233s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.358812] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 19.053s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.359023] env[63538]: INFO nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] During sync_power_state the instance has a pending task (networking). Skip. 
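The topology walk above (flavor/image limits of 65536:65536:65536, one possible topology, sorted result cores=1,sockets=1,threads=1) comes down to factoring the vCPU count under the per-level maxima. A simplified sketch of that selection, not the nova.virt.hardware implementation itself:

    # Simplified illustration of the enumeration logged above: keep every
    # sockets*cores*threads factorization of the vCPU count that stays within
    # the per-level maxima. Not nova's actual code.
    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        found = []
        for s in range(1, min(max_sockets, vcpus) + 1):
            for c in range(1, min(max_cores, vcpus) + 1):
                for t in range(1, min(max_threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        found.append((s, c, t))
        return found

    # m1.nano has a single vCPU, so the only factorization is 1 x 1 x 1.
    print(possible_topologies(1, 65536, 65536, 65536))  # [(1, 1, 1)]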
[ 595.359227] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.374106] env[63538]: DEBUG nova.network.neutron [req-e1b753da-6b79-415a-9348-951227722b24 req-de504023-2292-4705-bd8b-e9c7244fac04 service nova] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Updated VIF entry in instance network info cache for port 5e9b18aa-a332-400c-9c74-aed76633d8b5. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 595.374482] env[63538]: DEBUG nova.network.neutron [req-e1b753da-6b79-415a-9348-951227722b24 req-de504023-2292-4705-bd8b-e9c7244fac04 service nova] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Updating instance_info_cache with network_info: [{"id": "5e9b18aa-a332-400c-9c74-aed76633d8b5", "address": "fa:16:3e:93:27:04", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.53", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e9b18aa-a3", "ovs_interfaceid": "5e9b18aa-a332-400c-9c74-aed76633d8b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.384273] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100390, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.834752} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.388028] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 8fb62f47-cbf2-4b46-bc33-845e832f9ef0/8fb62f47-cbf2-4b46-bc33-845e832f9ef0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 595.388028] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 595.388028] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03fd4ae9-8ee7-4ff3-a687-df8d346583a7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.398283] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 595.398283] env[63538]: value = "task-5100395" [ 595.398283] env[63538]: _type = "Task" [ 595.398283] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.409694] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.418024] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100395, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.498333] env[63538]: DEBUG nova.network.neutron [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updated VIF entry in instance network info cache for port 47d19b83-6292-46e2-835f-1198ef52374c. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 595.498697] env[63538]: DEBUG nova.network.neutron [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance_info_cache with network_info: [{"id": "47d19b83-6292-46e2-835f-1198ef52374c", "address": "fa:16:3e:af:6f:01", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d19b83-62", "ovs_interfaceid": "47d19b83-6292-46e2-835f-1198ef52374c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.529382] env[63538]: DEBUG oslo_concurrency.lockutils [None req-024a6ca7-4311-4b8d-a50f-c923ba6a82ef tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Lock "e3ba860b-afb8-4843-9d99-049dce205f9f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.932s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.537796] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100394, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.605217] env[63538]: INFO nova.compute.manager [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Took 31.29 seconds to build instance. [ 595.617398] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100391, 'name': ReconfigVM_Task, 'duration_secs': 0.61622} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.617592] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 2e1b0bc7-3909-48e2-b9be-26822a57ee67/2e1b0bc7-3909-48e2-b9be-26822a57ee67.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 595.618231] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-203cf829-2c16-45bd-818b-447ee6792b5a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.629440] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 595.629440] env[63538]: value = "task-5100396" [ 595.629440] env[63538]: _type = "Task" [ 595.629440] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.639959] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100396, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.765040] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 102c0463-fb64-4dda-914c-b98c8e9991ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 595.861315] env[63538]: DEBUG nova.compute.manager [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 595.886481] env[63538]: DEBUG oslo_concurrency.lockutils [req-e1b753da-6b79-415a-9348-951227722b24 req-de504023-2292-4705-bd8b-e9c7244fac04 service nova] Releasing lock "refresh_cache-0a7c34e0-1acc-4761-804a-eb9ee00fdd77" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.911505] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100395, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081791} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.912955] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 595.912955] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973cc4ad-dd59-49ef-a009-02f92b82907c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.937800] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 8fb62f47-cbf2-4b46-bc33-845e832f9ef0/8fb62f47-cbf2-4b46-bc33-845e832f9ef0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 595.941681] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b048102f-3caa-4e0e-bd0e-cac4814eda4d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.963649] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 595.963649] env[63538]: value = "task-5100397" [ 595.963649] env[63538]: _type = "Task" [ 595.963649] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.975156] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100397, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.003476] env[63538]: DEBUG oslo_concurrency.lockutils [req-50de407f-205c-4d3d-81b1-a8ef233cc935 req-41130d4d-7f6a-4d00-9f7a-cfd40531dd29 service nova] Releasing lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.040650] env[63538]: DEBUG nova.compute.manager [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 596.047285] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100394, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.110824] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a258e088-9b1d-4ba7-9da8-90bbc23a7a36 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.807s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.112180] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 19.806s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.112971] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c5140d-6029-4b26-a6b1-34c7c3c3e4a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.144104] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100396, 'name': Rename_Task, 'duration_secs': 0.483144} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.144421] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 596.144846] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db784f5a-5e5d-4da6-9721-31bf5a447b8a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.155511] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 596.155511] env[63538]: value = "task-5100398" [ 596.155511] env[63538]: _type = "Task" [ 596.155511] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.167897] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100398, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.232379] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Acquiring lock "e1710498-0616-4862-afc0-6e452dc19882" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.233353] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Lock "e1710498-0616-4862-afc0-6e452dc19882" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.233353] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Acquiring lock "e1710498-0616-4862-afc0-6e452dc19882-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.233353] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Lock "e1710498-0616-4862-afc0-6e452dc19882-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.233353] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Lock "e1710498-0616-4862-afc0-6e452dc19882-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.235564] env[63538]: INFO nova.compute.manager [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Terminating instance [ 596.237666] env[63538]: DEBUG nova.compute.manager [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 596.237863] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 596.238714] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b6e98f-d99d-471c-bfde-25744b359353 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.248869] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 596.249593] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad0a208f-2720-4599-97e1-b3075020beef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.258488] env[63538]: DEBUG oslo_vmware.api [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Waiting for the task: (returnval){ [ 596.258488] env[63538]: value = "task-5100399" [ 596.258488] env[63538]: _type = "Task" [ 596.258488] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.270964] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 543875b5-195a-476d-a0b4-3211ceefa27f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 596.273848] env[63538]: DEBUG nova.network.neutron [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Successfully updated port: 589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 596.393664] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.478717] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100397, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.538358] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100394, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.572315] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.620711] env[63538]: DEBUG nova.compute.manager [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 596.628139] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.516s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.673150] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100398, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.771193] env[63538]: DEBUG oslo_vmware.api [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100399, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.777380] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance ede967c0-ec3a-4f26-8290-0ee36890cd75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 596.777380] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 596.777380] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=100GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '13', 'num_vm_active': '5', 'num_task_None': '4', 'num_os_type_None': '13', 'num_proj_2488493adc8b48d29e615ebcb8a5935e': '1', 'io_workload': '8', 'num_task_deleting': '2', 'num_proj_64662e4603754bd4a9a59035b19992c8': '1', 'num_proj_2ab51bcca7dc40688572337d893c1b4f': '1', 'num_proj_2602b69ba91f4ecca53962b19ccdedc1': '1', 'num_proj_4aaa7ff9816b44e88f2afb0869b10b3d': '1', 'num_vm_building': '8', 'num_task_spawning': '7', 'num_proj_7c1f0c999ede418c866074d9276050ff': '2', 'num_proj_64606c99619d48e1a165d3ab701caf7a': '1', 'num_proj_364d736fc32f4f1caf486e2fa826fa97': '1', 'num_proj_3dc18da1ea704eeaaeb62633c4f76ee8': '1', 'num_proj_6c5e6ed681ed4078bd9115b30f419d9a': '1', 'num_proj_7cc062eb2325468eaa723c9da2117df8': '1', 'num_proj_d7a98c2190944e4284f2c4f02cee8ca2': '1'} {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 596.779227] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Acquiring lock "refresh_cache-10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.779462] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Acquired lock "refresh_cache-10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.779717] env[63538]: DEBUG nova.network.neutron [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 596.805344] env[63538]: DEBUG nova.network.neutron [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Successfully updated port: ac528a58-339f-4621-890a-afe5e3ee634d {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 596.975641] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100397, 'name': ReconfigVM_Task, 'duration_secs': 0.886358} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.978753] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 8fb62f47-cbf2-4b46-bc33-845e832f9ef0/8fb62f47-cbf2-4b46-bc33-845e832f9ef0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 596.980872] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2c701bf-ddb1-4960-bd2c-9dfd8b0a038b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.989355] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 596.989355] env[63538]: value = "task-5100400" [ 596.989355] env[63538]: _type = "Task" [ 596.989355] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.004130] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100400, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.042074] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100394, 'name': CreateVM_Task, 'duration_secs': 1.614699} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.045224] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 597.047098] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.047098] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.047098] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 597.047098] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b7ac32e-70f1-4962-bcaf-34312e3aeef8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.052910] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 597.052910] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c3cce9-5dbb-d4ed-2f91-29234c92d892" [ 597.052910] env[63538]: _type = "Task" [ 597.052910] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.065055] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c3cce9-5dbb-d4ed-2f91-29234c92d892, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.147089] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1b8bdc-bee3-4fac-acfa-9be22c41690f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.150956] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.157427] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9ff248-e736-4911-b2c2-d06f75196fc4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.199455] env[63538]: DEBUG oslo_vmware.api [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100398, 'name': PowerOnVM_Task, 'duration_secs': 0.856143} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.200112] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 597.200326] env[63538]: INFO nova.compute.manager [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Took 13.28 seconds to spawn the instance on the hypervisor. 
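The Rename_Task and PowerOnVM_Task entries above follow the usual oslo.vmware pattern: a SOAP call returns a Task managed object, and the caller polls it (the repeated "progress is N%" lines) until it completes. A minimal sketch of that pattern, assuming the oslo.vmware package and using placeholder host, credentials, and VM reference rather than anything taken from this log:

from oslo_vmware import api as vmware_api

# Placeholder connection details; api_retry_count and task_poll_interval are the
# knobs behind the retry/poll behaviour visible in the entries above.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def power_on(vm_ref):
    # Invoke the SOAP method, then block until the returned Task reaches 'success';
    # wait_for_task raises if the Task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)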
[ 597.200544] env[63538]: DEBUG nova.compute.manager [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 597.201631] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf2fe4d-e03b-4410-9e4a-c65e6f37ea71 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.205459] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0cb7d02-97fe-46c3-9318-0c98ccd9b490 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.220819] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2175e8-733d-4a14-8198-94c0c7133556 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.241028] env[63538]: DEBUG nova.compute.provider_tree [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.272338] env[63538]: DEBUG oslo_vmware.api [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100399, 'name': PowerOffVM_Task, 'duration_secs': 0.546134} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.273103] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 597.273259] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 597.273535] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a20329d1-810a-40aa-a6b2-a0f3473b608f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.308910] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "refresh_cache-36d40b69-fae7-4867-afa1-4befdc96bde0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.309849] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquired lock "refresh_cache-36d40b69-fae7-4867-afa1-4befdc96bde0" {{(pid=63538) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.309849] env[63538]: DEBUG nova.network.neutron [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 597.346241] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 597.346478] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 597.346667] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Deleting the datastore file [datastore1] e1710498-0616-4862-afc0-6e452dc19882 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 597.346996] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-49a73aed-70d8-42c9-bd88-8b673c93f6da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.357884] env[63538]: DEBUG oslo_vmware.api [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Waiting for the task: (returnval){ [ 597.357884] env[63538]: value = "task-5100402" [ 597.357884] env[63538]: _type = "Task" [ 597.357884] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.367487] env[63538]: DEBUG oslo_vmware.api [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100402, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.368544] env[63538]: DEBUG nova.network.neutron [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 597.387734] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "dbf48807-08a7-46d1-8454-42437a9f87c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.387734] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "dbf48807-08a7-46d1-8454-42437a9f87c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.500263] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100400, 'name': Rename_Task, 'duration_secs': 0.240322} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.500570] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 597.500999] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6720452f-f72c-46c4-aea2-25363d92614f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.509663] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 597.509663] env[63538]: value = "task-5100403" [ 597.509663] env[63538]: _type = "Task" [ 597.509663] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.521224] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100403, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.565605] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c3cce9-5dbb-d4ed-2f91-29234c92d892, 'name': SearchDatastore_Task, 'duration_secs': 0.018781} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.565920] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.566184] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 597.566551] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.566643] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.566795] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 597.567082] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0725a27-bc64-4959-9e34-c2ef39fff4f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.580084] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 597.580272] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 597.581058] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f42e9b8-285e-4c14-8c1d-7490c9e010dd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.588026] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 597.588026] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f6cdbe-f0cb-f2cd-f3c2-74a61b2f72fb" [ 597.588026] env[63538]: _type = "Task" [ 597.588026] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.598217] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f6cdbe-f0cb-f2cd-f3c2-74a61b2f72fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.640026] env[63538]: DEBUG nova.network.neutron [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Updating instance_info_cache with network_info: [{"id": "589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0", "address": "fa:16:3e:93:33:99", "network": {"id": "492a5c75-49db-432b-b813-9ae4c6da541f", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-492223577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc062eb2325468eaa723c9da2117df8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap589ae6e1-42", "ovs_interfaceid": "589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.742786] env[63538]: INFO nova.compute.manager [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Took 31.43 seconds to build instance. 
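The acquire/release pairs around "[datastore2] devstack-image-cache_base/..." a few entries back are the guard that keeps concurrent spawns from fetching the same cached image twice. A minimal sketch of that locking pattern with oslo.concurrency, using a placeholder lock-path and an empty body in place of the real check-and-copy logic:

from oslo_concurrency import lockutils

def fetch_image_if_missing(datastore, image_id):
    # Lock name modelled on the "[<datastore>] devstack-image-cache_base/<image>" names
    # in the log; external=True adds a file lock next to the in-process semaphore, which
    # is what the "Acquired external semaphore" entries correspond to.
    lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
    with lockutils.lock(lock_name, lock_file_prefix='nova-',
                        external=True, lock_path='/tmp'):  # placeholder lock directory
        pass  # placeholder: search the datastore cache and copy the image only if absent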
[ 597.743955] env[63538]: DEBUG nova.scheduler.client.report [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 597.846129] env[63538]: DEBUG nova.compute.manager [None req-87d229ca-ae81-4f4b-a219-4b3a11b44991 tempest-ServerExternalEventsTest-1343264281 tempest-ServerExternalEventsTest-1343264281-project] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Received event network-changed {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 597.846417] env[63538]: DEBUG nova.compute.manager [None req-87d229ca-ae81-4f4b-a219-4b3a11b44991 tempest-ServerExternalEventsTest-1343264281 tempest-ServerExternalEventsTest-1343264281-project] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Refreshing instance network info cache due to event network-changed. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 597.846544] env[63538]: DEBUG oslo_concurrency.lockutils [None req-87d229ca-ae81-4f4b-a219-4b3a11b44991 tempest-ServerExternalEventsTest-1343264281 tempest-ServerExternalEventsTest-1343264281-project] Acquiring lock "refresh_cache-0a7c34e0-1acc-4761-804a-eb9ee00fdd77" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.846687] env[63538]: DEBUG oslo_concurrency.lockutils [None req-87d229ca-ae81-4f4b-a219-4b3a11b44991 tempest-ServerExternalEventsTest-1343264281 tempest-ServerExternalEventsTest-1343264281-project] Acquired lock "refresh_cache-0a7c34e0-1acc-4761-804a-eb9ee00fdd77" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.846867] env[63538]: DEBUG nova.network.neutron [None req-87d229ca-ae81-4f4b-a219-4b3a11b44991 tempest-ServerExternalEventsTest-1343264281 tempest-ServerExternalEventsTest-1343264281-project] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 597.869861] env[63538]: DEBUG nova.network.neutron [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 597.875497] env[63538]: DEBUG oslo_vmware.api [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Task: {'id': task-5100402, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.257179} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.876435] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 597.876807] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 597.876880] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 597.878026] env[63538]: INFO nova.compute.manager [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] [instance: e1710498-0616-4862-afc0-6e452dc19882] Took 1.64 seconds to destroy the instance on the hypervisor. [ 597.878026] env[63538]: DEBUG oslo.service.loopingcall [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 597.878026] env[63538]: DEBUG nova.compute.manager [-] [instance: e1710498-0616-4862-afc0-6e452dc19882] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 597.878026] env[63538]: DEBUG nova.network.neutron [-] [instance: e1710498-0616-4862-afc0-6e452dc19882] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 598.021209] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100403, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.102173] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f6cdbe-f0cb-f2cd-f3c2-74a61b2f72fb, 'name': SearchDatastore_Task, 'duration_secs': 0.014116} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.103061] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41b8e13a-5f83-4fc5-ba34-ce9b59bd24de {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.108813] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 598.108813] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bf4cee-e4c6-1b69-96d7-369480728136" [ 598.108813] env[63538]: _type = "Task" [ 598.108813] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.118595] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bf4cee-e4c6-1b69-96d7-369480728136, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.143113] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Releasing lock "refresh_cache-10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.143875] env[63538]: DEBUG nova.compute.manager [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Instance network_info: |[{"id": "589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0", "address": "fa:16:3e:93:33:99", "network": {"id": "492a5c75-49db-432b-b813-9ae4c6da541f", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-492223577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc062eb2325468eaa723c9da2117df8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap589ae6e1-42", "ovs_interfaceid": "589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 598.144369] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 
10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:33:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 598.153854] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Creating folder: Project (7cc062eb2325468eaa723c9da2117df8). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.153854] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59b2087c-41d1-48d2-bc2b-a8e09d9697e9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.167733] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Created folder: Project (7cc062eb2325468eaa723c9da2117df8) in parent group-v992234. [ 598.167733] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Creating folder: Instances. Parent ref: group-v992266. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.167733] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9706cf27-3952-47b5-bcd8-fd66a7eced17 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.177860] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Created folder: Instances in parent group-v992266. [ 598.178138] env[63538]: DEBUG oslo.service.loopingcall [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 598.178350] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 598.178571] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb604242-4912-40a1-8c64-0f15d9f6e7e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.204482] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 598.204482] env[63538]: value = "task-5100406" [ 598.204482] env[63538]: _type = "Task" [ 598.204482] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.216685] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100406, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.250687] env[63538]: DEBUG nova.network.neutron [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Updating instance_info_cache with network_info: [{"id": "ac528a58-339f-4621-890a-afe5e3ee634d", "address": "fa:16:3e:ec:27:93", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac528a58-33", "ovs_interfaceid": "ac528a58-339f-4621-890a-afe5e3ee634d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.252218] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e61a2438-b9cd-4342-928a-24a411888af1 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.949s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.254241] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63538) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 598.254241] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.073s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.256582] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.180s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.259865] env[63538]: INFO nova.compute.claims [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 
tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.263034] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 598.263468] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Getting list of instances from cluster (obj){ [ 598.263468] env[63538]: value = "domain-c8" [ 598.263468] env[63538]: _type = "ClusterComputeResource" [ 598.263468] env[63538]: } {{(pid=63538) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 598.265202] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32ab53a-5f04-481e-b570-85d16a30a9dd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.285577] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Got total of 8 instances {{(pid=63538) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 598.474782] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Acquiring lock "7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.474974] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Lock "7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.498766] env[63538]: DEBUG nova.compute.manager [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Received event network-changed-5b823d56-3162-4875-a202-4526e8d9e433 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 598.498955] env[63538]: DEBUG nova.compute.manager [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Refreshing instance network info cache due to event network-changed-5b823d56-3162-4875-a202-4526e8d9e433. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 598.499181] env[63538]: DEBUG oslo_concurrency.lockutils [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] Acquiring lock "refresh_cache-8fb62f47-cbf2-4b46-bc33-845e832f9ef0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.499315] env[63538]: DEBUG oslo_concurrency.lockutils [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] Acquired lock "refresh_cache-8fb62f47-cbf2-4b46-bc33-845e832f9ef0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.500025] env[63538]: DEBUG nova.network.neutron [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Refreshing network info cache for port 5b823d56-3162-4875-a202-4526e8d9e433 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 598.521448] env[63538]: DEBUG oslo_vmware.api [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100403, 'name': PowerOnVM_Task, 'duration_secs': 0.942738} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.521448] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 598.521448] env[63538]: INFO nova.compute.manager [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Took 12.15 seconds to spawn the instance on the hypervisor. 
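The "Checking state" entries in this stretch are single property reads: after a power operation the manager re-reads the VM's power state from vCenter, which is what the surrounding PropertyCollector.RetrievePropertiesEx invocations are. A minimal sketch of one such read through oslo.vmware, assuming an already-created session and a placeholder VM reference:

from oslo_vmware import vim_util

def get_power_state(session, vm_ref):
    # One RetrievePropertiesEx round trip for a single property on a single managed
    # object; the returned value renders as e.g. 'poweredOn' or 'poweredOff'.
    return session.invoke_api(vim_util, 'get_object_property',
                              session.vim, vm_ref, 'runtime.powerState')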
[ 598.521596] env[63538]: DEBUG nova.compute.manager [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 598.522528] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797a665d-b1c9-4877-a6e4-cb854c551680 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.630395] env[63538]: DEBUG nova.compute.manager [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Received event network-vif-deleted-0fbe2e21-79c6-4b82-a23c-a25b732e78b7 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 598.632071] env[63538]: DEBUG nova.compute.manager [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Received event network-changed-51784c09-2e46-4add-9f20-a0a9563f7eaf {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 598.632768] env[63538]: DEBUG nova.compute.manager [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Refreshing instance network info cache due to event network-changed-51784c09-2e46-4add-9f20-a0a9563f7eaf. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 598.633552] env[63538]: DEBUG oslo_concurrency.lockutils [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] Acquiring lock "refresh_cache-174368d1-9910-495b-a923-842e0440fd01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.634016] env[63538]: DEBUG oslo_concurrency.lockutils [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] Acquired lock "refresh_cache-174368d1-9910-495b-a923-842e0440fd01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.634698] env[63538]: DEBUG nova.network.neutron [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Refreshing network info cache for port 51784c09-2e46-4add-9f20-a0a9563f7eaf {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 598.651433] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bf4cee-e4c6-1b69-96d7-369480728136, 'name': SearchDatastore_Task, 'duration_secs': 0.021815} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.652778] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.654515] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] f9fa5578-acf3-416f-9cb0-8ceb00e5132d/f9fa5578-acf3-416f-9cb0-8ceb00e5132d.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 598.655431] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3daf34a6-8725-4a6c-8daa-c1f564d1b0aa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.669933] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 598.669933] env[63538]: value = "task-5100407" [ 598.669933] env[63538]: _type = "Task" [ 598.669933] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.679604] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100407, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.718378] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100406, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.740504] env[63538]: DEBUG nova.network.neutron [-] [instance: e1710498-0616-4862-afc0-6e452dc19882] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.757050] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Releasing lock "refresh_cache-36d40b69-fae7-4867-afa1-4befdc96bde0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.757050] env[63538]: DEBUG nova.compute.manager [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Instance network_info: |[{"id": "ac528a58-339f-4621-890a-afe5e3ee634d", "address": "fa:16:3e:ec:27:93", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac528a58-33", "ovs_interfaceid": "ac528a58-339f-4621-890a-afe5e3ee634d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 598.757348] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:27:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac528a58-339f-4621-890a-afe5e3ee634d', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 598.764835] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Creating folder: Project (d7a98c2190944e4284f2c4f02cee8ca2). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.767577] env[63538]: DEBUG nova.compute.manager [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 598.778271] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af63e026-f081-4aa0-9faa-062f9be94ddf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.788332] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Created folder: Project (d7a98c2190944e4284f2c4f02cee8ca2) in parent group-v992234. [ 598.788332] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Creating folder: Instances. Parent ref: group-v992269. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.788965] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6f78547-0fb2-4790-982b-a56633623e47 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.804111] env[63538]: DEBUG nova.compute.manager [req-0cb063df-94d1-4fd2-964e-55d020a2c5e5 req-fb0714e3-16d6-4b87-843b-911af986f5a7 service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Received event network-vif-plugged-a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 598.804111] env[63538]: DEBUG oslo_concurrency.lockutils [req-0cb063df-94d1-4fd2-964e-55d020a2c5e5 req-fb0714e3-16d6-4b87-843b-911af986f5a7 service nova] Acquiring lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.804502] env[63538]: DEBUG oslo_concurrency.lockutils [req-0cb063df-94d1-4fd2-964e-55d020a2c5e5 req-fb0714e3-16d6-4b87-843b-911af986f5a7 service nova] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.804676] env[63538]: DEBUG oslo_concurrency.lockutils [req-0cb063df-94d1-4fd2-964e-55d020a2c5e5 req-fb0714e3-16d6-4b87-843b-911af986f5a7 service nova] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.804934] env[63538]: DEBUG nova.compute.manager [req-0cb063df-94d1-4fd2-964e-55d020a2c5e5 req-fb0714e3-16d6-4b87-843b-911af986f5a7 service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] No waiting events found dispatching network-vif-plugged-a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 598.805154] env[63538]: WARNING nova.compute.manager [req-0cb063df-94d1-4fd2-964e-55d020a2c5e5 req-fb0714e3-16d6-4b87-843b-911af986f5a7 service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Received unexpected event network-vif-plugged-a5dfe48b-4acc-472c-8e00-f936b4068ea5 for instance with vm_state building and task_state spawning. [ 598.809420] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Created folder: Instances in parent group-v992269. [ 598.809633] env[63538]: DEBUG oslo.service.loopingcall [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 598.810622] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 598.810622] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-466f58d5-c512-4c3c-be3f-cc5c1aa06a7e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.827841] env[63538]: DEBUG nova.network.neutron [None req-87d229ca-ae81-4f4b-a219-4b3a11b44991 tempest-ServerExternalEventsTest-1343264281 tempest-ServerExternalEventsTest-1343264281-project] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Updating instance_info_cache with network_info: [{"id": "5e9b18aa-a332-400c-9c74-aed76633d8b5", "address": "fa:16:3e:93:27:04", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.53", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e9b18aa-a3", "ovs_interfaceid": "5e9b18aa-a332-400c-9c74-aed76633d8b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.841887] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 598.841887] env[63538]: value = "task-5100411" [ 598.841887] env[63538]: _type = "Task" [ 598.841887] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.852467] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100411, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.045023] env[63538]: INFO nova.compute.manager [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Took 31.77 seconds to build instance. [ 599.186287] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100407, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.218439] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100406, 'name': CreateVM_Task, 'duration_secs': 0.598462} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.219061] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 599.222026] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.222026] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.222026] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 599.222026] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcb4f7f5-9183-48f7-b9c4-ade320b98bb3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.230317] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Waiting for the task: (returnval){ [ 599.230317] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5226a090-d683-8560-a387-5890feb7a8b6" [ 599.230317] env[63538]: _type = "Task" [ 599.230317] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.232125] env[63538]: DEBUG nova.compute.manager [None req-a037e4ae-1bb4-4ac8-91bb-1f245a25ca46 tempest-ServerDiagnosticsV248Test-2070800695 tempest-ServerDiagnosticsV248Test-2070800695-project-admin] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 599.233847] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53eeac00-cb47-4ca3-a5b6-c1f92397a476 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.246629] env[63538]: INFO nova.compute.manager [-] [instance: e1710498-0616-4862-afc0-6e452dc19882] Took 1.37 seconds to deallocate network for instance. [ 599.247056] env[63538]: INFO nova.compute.manager [None req-a037e4ae-1bb4-4ac8-91bb-1f245a25ca46 tempest-ServerDiagnosticsV248Test-2070800695 tempest-ServerDiagnosticsV248Test-2070800695-project-admin] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Retrieving diagnostics [ 599.258291] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1991ee0-6a7b-474f-9d5e-5a77653e87db {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.261299] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5226a090-d683-8560-a387-5890feb7a8b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.331715] env[63538]: DEBUG oslo_concurrency.lockutils [None req-87d229ca-ae81-4f4b-a219-4b3a11b44991 tempest-ServerExternalEventsTest-1343264281 tempest-ServerExternalEventsTest-1343264281-project] Releasing lock "refresh_cache-0a7c34e0-1acc-4761-804a-eb9ee00fdd77" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.340831] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.359962] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100411, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.458686] env[63538]: DEBUG nova.network.neutron [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Updated VIF entry in instance network info cache for port 5b823d56-3162-4875-a202-4526e8d9e433. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 599.459065] env[63538]: DEBUG nova.network.neutron [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Updating instance_info_cache with network_info: [{"id": "5b823d56-3162-4875-a202-4526e8d9e433", "address": "fa:16:3e:11:db:ad", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b823d56-31", "ovs_interfaceid": "5b823d56-3162-4875-a202-4526e8d9e433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.460753] env[63538]: DEBUG nova.network.neutron [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Updated VIF entry in instance network info cache for port 51784c09-2e46-4add-9f20-a0a9563f7eaf. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 599.461335] env[63538]: DEBUG nova.network.neutron [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Updating instance_info_cache with network_info: [{"id": "51784c09-2e46-4add-9f20-a0a9563f7eaf", "address": "fa:16:3e:df:3b:0a", "network": {"id": "1a29c626-75f0-4d3c-b7cf-2266e6dfe02b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-706845743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2488493adc8b48d29e615ebcb8a5935e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca401eaa-889a-4f9f-ac9a-56b4c41bfc06", "external-id": "nsx-vlan-transportzone-877", "segmentation_id": 877, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51784c09-2e", "ovs_interfaceid": "51784c09-2e46-4add-9f20-a0a9563f7eaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.547488] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c6896b53-4ac4-43ac-92bc-0cf8ad9908aa tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "8fb62f47-cbf2-4b46-bc33-845e832f9ef0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.288s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.615903] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef18c6af-baac-4eac-90ad-34e305da7edf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.625836] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518d791f-720a-481d-91c1-fb130051d062 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.661194] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68e15be-0e64-420b-9caf-834bd0e9d7d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.667737] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d928936a-1f92-46b8-9f98-b6713f80ceec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.679983] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100407, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.756842} completed 
successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.688182] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] f9fa5578-acf3-416f-9cb0-8ceb00e5132d/f9fa5578-acf3-416f-9cb0-8ceb00e5132d.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 599.688420] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 599.688903] env[63538]: DEBUG nova.compute.provider_tree [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.690058] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aecadcc9-6aaa-4267-bc6f-8004181b66be {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.697350] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 599.697350] env[63538]: value = "task-5100415" [ 599.697350] env[63538]: _type = "Task" [ 599.697350] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.706505] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100415, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.745159] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5226a090-d683-8560-a387-5890feb7a8b6, 'name': SearchDatastore_Task, 'duration_secs': 0.038459} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.745916] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.745916] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 599.745916] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.746123] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.746225] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 599.746484] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86510485-6ea3-49e8-9720-2dc0016650e4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.755849] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 599.756079] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 599.757100] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a558880-d3eb-4385-885e-e302298df039 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.763179] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Waiting for the task: (returnval){ [ 599.763179] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52375b79-2e60-3545-e3c0-4ad902b2a6e8" [ 599.763179] env[63538]: _type = "Task" [ 599.763179] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.767645] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.772930] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52375b79-2e60-3545-e3c0-4ad902b2a6e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.851414] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100411, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.964354] env[63538]: DEBUG oslo_concurrency.lockutils [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] Releasing lock "refresh_cache-8fb62f47-cbf2-4b46-bc33-845e832f9ef0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.964354] env[63538]: DEBUG nova.compute.manager [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Received event network-changed-a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 599.964354] env[63538]: DEBUG nova.compute.manager [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Refreshing instance network info cache due to event network-changed-a5dfe48b-4acc-472c-8e00-f936b4068ea5. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 599.964898] env[63538]: DEBUG oslo_concurrency.lockutils [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] Acquiring lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.964898] env[63538]: DEBUG oslo_concurrency.lockutils [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] Acquired lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.964898] env[63538]: DEBUG nova.network.neutron [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Refreshing network info cache for port a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 599.966062] env[63538]: DEBUG oslo_concurrency.lockutils [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] Releasing lock "refresh_cache-174368d1-9910-495b-a923-842e0440fd01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.966323] env[63538]: DEBUG nova.compute.manager [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Received event network-vif-plugged-589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 599.966466] env[63538]: DEBUG oslo_concurrency.lockutils [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] Acquiring lock "10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.966691] env[63538]: DEBUG oslo_concurrency.lockutils [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] Lock "10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.966806] env[63538]: DEBUG oslo_concurrency.lockutils [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] Lock "10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.966972] env[63538]: DEBUG nova.compute.manager [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] No waiting events found dispatching network-vif-plugged-589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 599.967156] env[63538]: WARNING nova.compute.manager [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 
10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Received unexpected event network-vif-plugged-589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0 for instance with vm_state building and task_state spawning. [ 599.967542] env[63538]: DEBUG nova.compute.manager [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Received event network-changed-589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 599.967542] env[63538]: DEBUG nova.compute.manager [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Refreshing instance network info cache due to event network-changed-589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 599.967653] env[63538]: DEBUG oslo_concurrency.lockutils [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] Acquiring lock "refresh_cache-10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.967846] env[63538]: DEBUG oslo_concurrency.lockutils [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] Acquired lock "refresh_cache-10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.967909] env[63538]: DEBUG nova.network.neutron [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Refreshing network info cache for port 589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 600.050443] env[63538]: DEBUG nova.compute.manager [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 600.193594] env[63538]: DEBUG nova.scheduler.client.report [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.209323] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100415, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.250023} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.213069] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 600.213069] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1af2b75-ba2a-4b78-a1a6-a218301bd929 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.237619] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] f9fa5578-acf3-416f-9cb0-8ceb00e5132d/f9fa5578-acf3-416f-9cb0-8ceb00e5132d.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 600.238511] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-857f07b7-6181-423b-be2c-d851f5769e76 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.263129] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 600.263129] env[63538]: value = "task-5100416" [ 600.263129] env[63538]: _type = "Task" [ 600.263129] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.285153] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52375b79-2e60-3545-e3c0-4ad902b2a6e8, 'name': SearchDatastore_Task, 'duration_secs': 0.013501} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.289393] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100416, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.289637] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12d0d257-cdb5-4ed2-8dd7-6a5ea1c33f60 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.296671] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Waiting for the task: (returnval){ [ 600.296671] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521e65bb-7cdd-7898-e442-07d7b6a3f76b" [ 600.296671] env[63538]: _type = "Task" [ 600.296671] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.308713] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521e65bb-7cdd-7898-e442-07d7b6a3f76b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.355926] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100411, 'name': CreateVM_Task, 'duration_secs': 1.449437} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.355926] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 600.355926] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.355926] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.355926] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 600.356445] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6662b36d-184e-4cb6-840b-79e39124db3f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.360703] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 
tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 600.360703] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5259bb20-c1cd-c6d8-d86b-4e4ae57493ec" [ 600.360703] env[63538]: _type = "Task" [ 600.360703] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.373424] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5259bb20-c1cd-c6d8-d86b-4e4ae57493ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.576679] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.698713] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.699267] env[63538]: DEBUG nova.compute.manager [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 600.702674] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.112s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.706114] env[63538]: INFO nova.compute.claims [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.775945] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100416, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.808161] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521e65bb-7cdd-7898-e442-07d7b6a3f76b, 'name': SearchDatastore_Task, 'duration_secs': 0.025253} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.809679] env[63538]: DEBUG nova.network.neutron [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Updated VIF entry in instance network info cache for port a5dfe48b-4acc-472c-8e00-f936b4068ea5. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 600.809679] env[63538]: DEBUG nova.network.neutron [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Updating instance_info_cache with network_info: [{"id": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "address": "fa:16:3e:a9:56:1a", "network": {"id": "4fd42fb1-fcc5-467d-88b7-1a5ab3297631", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-9303923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c5e6ed681ed4078bd9115b30f419d9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5dfe48b-4a", "ovs_interfaceid": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.813583] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.814133] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf/10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 600.815666] env[63538]: DEBUG 
oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d40097a4-b66f-4831-9cab-0a967e9202a6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.822877] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Waiting for the task: (returnval){ [ 600.822877] env[63538]: value = "task-5100417" [ 600.822877] env[63538]: _type = "Task" [ 600.822877] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.833528] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100417, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.872139] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5259bb20-c1cd-c6d8-d86b-4e4ae57493ec, 'name': SearchDatastore_Task, 'duration_secs': 0.017275} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.872477] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.872715] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.873010] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.873203] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.873389] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] 
Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 600.873661] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b38ccd64-ad7e-4321-9349-5100c1961986 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.894121] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 600.894329] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 600.895102] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-942334ee-9174-4bde-b1b0-d6fd1170c5e3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.901785] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 600.901785] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5209d534-4c20-0f55-b9b5-b990306c05dc" [ 600.901785] env[63538]: _type = "Task" [ 600.901785] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.910859] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5209d534-4c20-0f55-b9b5-b990306c05dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.023634] env[63538]: DEBUG oslo_concurrency.lockutils [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Acquiring lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.024101] env[63538]: DEBUG oslo_concurrency.lockutils [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.024246] env[63538]: DEBUG oslo_concurrency.lockutils [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Acquiring lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.024508] env[63538]: DEBUG oslo_concurrency.lockutils [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.024730] env[63538]: DEBUG oslo_concurrency.lockutils [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.027277] env[63538]: INFO nova.compute.manager [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Terminating instance [ 601.030516] env[63538]: DEBUG nova.network.neutron [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Updated VIF entry in instance network info cache for port 589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 601.030965] env[63538]: DEBUG nova.network.neutron [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Updating instance_info_cache with network_info: [{"id": "589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0", "address": "fa:16:3e:93:33:99", "network": {"id": "492a5c75-49db-432b-b813-9ae4c6da541f", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-492223577-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc062eb2325468eaa723c9da2117df8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap589ae6e1-42", "ovs_interfaceid": "589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.034682] env[63538]: DEBUG nova.compute.manager [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 601.034682] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 601.034682] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b3e797-e629-4930-9a08-4782340acdbb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.045951] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 601.047172] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92d3a548-8c12-4604-8327-bf8559e6eee6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.056725] env[63538]: DEBUG oslo_vmware.api [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Waiting for the task: (returnval){ [ 601.056725] env[63538]: value = "task-5100418" [ 601.056725] env[63538]: _type = "Task" [ 601.056725] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.067702] env[63538]: DEBUG oslo_vmware.api [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100418, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.212207] env[63538]: DEBUG nova.compute.utils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.216811] env[63538]: DEBUG nova.compute.manager [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 601.217035] env[63538]: DEBUG nova.network.neutron [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 601.282064] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100416, 'name': ReconfigVM_Task, 'duration_secs': 0.746908} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.282064] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Reconfigured VM instance instance-0000000b to attach disk [datastore2] f9fa5578-acf3-416f-9cb0-8ceb00e5132d/f9fa5578-acf3-416f-9cb0-8ceb00e5132d.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 601.282064] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9fb6672c-a710-49b9-b05c-a3df64ff70e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.289924] env[63538]: DEBUG nova.policy [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7820496dc6e144f8b86efc4bbea09e87', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22554cf5d2194573ba27c6236d2c3ad2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 601.295387] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 601.295387] env[63538]: value = "task-5100419" [ 601.295387] env[63538]: _type = "Task" [ 601.295387] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.309532] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100419, 'name': Rename_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.315932] env[63538]: DEBUG oslo_concurrency.lockutils [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] Releasing lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.316434] env[63538]: DEBUG nova.compute.manager [req-86d7f665-731c-4bd8-9296-c2ed5b5a95bd req-9f005b57-02a4-4d07-bfed-133a7a1df22d service nova] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Received event network-vif-deleted-937fc657-0b54-4a28-98fe-43139e1ba61c {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 601.339901] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100417, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.416647] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5209d534-4c20-0f55-b9b5-b990306c05dc, 'name': SearchDatastore_Task, 'duration_secs': 0.012012} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.418384] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48d6f69e-1093-4729-a826-1bc52a458528 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.430710] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 601.430710] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c75ba6-0e25-a520-8828-8a544cbf2fe2" [ 601.430710] env[63538]: _type = "Task" [ 601.430710] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.444409] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c75ba6-0e25-a520-8828-8a544cbf2fe2, 'name': SearchDatastore_Task, 'duration_secs': 0.014724} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.445297] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.445798] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 36d40b69-fae7-4867-afa1-4befdc96bde0/36d40b69-fae7-4867-afa1-4befdc96bde0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 601.446056] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef8854ed-637f-4055-aa4f-3cfe0531fdfe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.455561] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 601.455561] env[63538]: value = "task-5100420" [ 601.455561] env[63538]: _type = "Task" [ 601.455561] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.465998] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100420, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.538840] env[63538]: DEBUG oslo_concurrency.lockutils [req-b7d5ea88-4a81-42bf-a0b2-0440f2f63090 req-95d7bdec-dcdb-43b7-a157-7ff29e90fa9f service nova] Releasing lock "refresh_cache-10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.568904] env[63538]: DEBUG oslo_vmware.api [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100418, 'name': PowerOffVM_Task, 'duration_secs': 0.379422} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.569364] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 601.569643] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 601.571802] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3d2018f-d007-4f89-9604-d638990f2cb9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.672418] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 601.672925] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 601.673309] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Deleting the datastore file [datastore2] 0a7c34e0-1acc-4761-804a-eb9ee00fdd77 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 601.674174] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a15d69ce-1e41-4788-b7e9-4cf9e2765888 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.687904] env[63538]: DEBUG oslo_vmware.api [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Waiting for the task: (returnval){ [ 601.687904] env[63538]: value = "task-5100422" [ 601.687904] env[63538]: _type = "Task" [ 601.687904] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.703025] env[63538]: DEBUG oslo_vmware.api [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100422, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.718599] env[63538]: DEBUG nova.compute.manager [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 601.792454] env[63538]: DEBUG nova.network.neutron [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Successfully created port: 33b2bb84-c893-4ee8-90de-6696ef21d830 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 601.817047] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100419, 'name': Rename_Task, 'duration_secs': 0.282657} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.817587] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 601.817848] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58e6ab20-ea80-4a16-815f-9974cffc524f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.838435] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 601.838435] env[63538]: value = "task-5100423" [ 601.838435] env[63538]: _type = "Task" [ 601.838435] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.848028] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100417, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.589831} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.851962] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf/10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 601.852330] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 601.853812] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1a1742a2-f082-477d-8d11-116edfdecaf3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.861046] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100423, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.868276] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Waiting for the task: (returnval){ [ 601.868276] env[63538]: value = "task-5100424" [ 601.868276] env[63538]: _type = "Task" [ 601.868276] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.882870] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100424, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.931771] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Acquiring lock "d99b7b8e-633f-4fba-bce6-9b8e9e9892d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.932154] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Lock "d99b7b8e-633f-4fba-bce6-9b8e9e9892d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.932346] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Acquiring lock "d99b7b8e-633f-4fba-bce6-9b8e9e9892d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.932541] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Lock "d99b7b8e-633f-4fba-bce6-9b8e9e9892d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.933107] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Lock "d99b7b8e-633f-4fba-bce6-9b8e9e9892d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.935789] env[63538]: INFO nova.compute.manager [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Terminating instance [ 601.938557] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Acquiring lock "refresh_cache-d99b7b8e-633f-4fba-bce6-9b8e9e9892d1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.938675] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Acquired lock "refresh_cache-d99b7b8e-633f-4fba-bce6-9b8e9e9892d1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.938823] env[63538]: DEBUG nova.network.neutron [None 
req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 601.975198] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100420, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.168892] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa8d88d-4aa7-41c5-ba9f-2b3995c018b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.179634] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96702bc-eb43-46b6-b904-32cb2fd1e6ff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.224125] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dccd8d8a-04dc-480a-93b9-82eeba6b0ec6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.242872] env[63538]: DEBUG oslo_vmware.api [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100422, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.244554] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf9c93e-9636-4766-99cf-0fd4adec96fa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.267808] env[63538]: DEBUG nova.compute.provider_tree [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.294174] env[63538]: DEBUG nova.compute.manager [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Received event network-vif-plugged-ac528a58-339f-4621-890a-afe5e3ee634d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 602.294174] env[63538]: DEBUG oslo_concurrency.lockutils [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] Acquiring lock "36d40b69-fae7-4867-afa1-4befdc96bde0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.294616] env[63538]: DEBUG oslo_concurrency.lockutils [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] Lock "36d40b69-fae7-4867-afa1-4befdc96bde0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.294616] env[63538]: DEBUG oslo_concurrency.lockutils [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] Lock "36d40b69-fae7-4867-afa1-4befdc96bde0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.294949] env[63538]: DEBUG nova.compute.manager [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] No waiting events found dispatching network-vif-plugged-ac528a58-339f-4621-890a-afe5e3ee634d {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 602.295193] env[63538]: WARNING nova.compute.manager [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Received unexpected event network-vif-plugged-ac528a58-339f-4621-890a-afe5e3ee634d for instance with vm_state building and task_state spawning. 
[ 602.295193] env[63538]: DEBUG nova.compute.manager [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Received event network-changed-ac528a58-339f-4621-890a-afe5e3ee634d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 602.295340] env[63538]: DEBUG nova.compute.manager [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Refreshing instance network info cache due to event network-changed-ac528a58-339f-4621-890a-afe5e3ee634d. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 602.295490] env[63538]: DEBUG oslo_concurrency.lockutils [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] Acquiring lock "refresh_cache-36d40b69-fae7-4867-afa1-4befdc96bde0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.295579] env[63538]: DEBUG oslo_concurrency.lockutils [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] Acquired lock "refresh_cache-36d40b69-fae7-4867-afa1-4befdc96bde0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.295728] env[63538]: DEBUG nova.network.neutron [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Refreshing network info cache for port ac528a58-339f-4621-890a-afe5e3ee634d {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 602.351471] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100423, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.378305] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100424, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.28324} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.378305] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 602.378985] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a761e9f2-5811-4a99-8e46-a254e7b6fd5b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.408454] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf/10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 602.408800] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aef1c011-5267-4c80-9e9c-f995dc90374e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.431907] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Waiting for the task: (returnval){ [ 602.431907] env[63538]: value = "task-5100426" [ 602.431907] env[63538]: _type = "Task" [ 602.431907] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.444040] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100426, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.469052] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100420, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.660084} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.470987] env[63538]: DEBUG nova.network.neutron [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.472187] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 36d40b69-fae7-4867-afa1-4befdc96bde0/36d40b69-fae7-4867-afa1-4befdc96bde0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 602.472670] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 602.472955] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-398cb90a-e596-4156-a03e-cdf2ddc67385 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.483021] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 602.483021] env[63538]: value = "task-5100427" [ 602.483021] env[63538]: _type = "Task" [ 602.483021] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.494193] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100427, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.562493] env[63538]: DEBUG nova.compute.manager [req-50c96cc3-9bc1-4fa5-9c4c-4d4eeb80d1c7 req-f8dc491c-e04e-4d98-bd05-281ee0fff025 service nova] [instance: e1710498-0616-4862-afc0-6e452dc19882] Received event network-vif-deleted-9bd783f7-7fb0-4c77-923b-34206070a65c {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 602.598792] env[63538]: DEBUG nova.network.neutron [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.730550] env[63538]: DEBUG oslo_vmware.api [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Task: {'id': task-5100422, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.742388} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.730843] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 602.731044] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 602.731299] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 602.731560] env[63538]: INFO nova.compute.manager [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Took 1.70 seconds to destroy the instance on the hypervisor. [ 602.731935] env[63538]: DEBUG oslo.service.loopingcall [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 602.732238] env[63538]: DEBUG nova.compute.manager [-] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 602.732360] env[63538]: DEBUG nova.network.neutron [-] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 602.745733] env[63538]: DEBUG nova.compute.manager [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 602.774380] env[63538]: DEBUG nova.scheduler.client.report [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 602.785415] env[63538]: DEBUG nova.virt.hardware [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 602.785664] env[63538]: DEBUG nova.virt.hardware [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 602.785820] env[63538]: DEBUG nova.virt.hardware [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.786014] env[63538]: DEBUG nova.virt.hardware [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 602.786362] env[63538]: DEBUG nova.virt.hardware [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.786520] env[63538]: DEBUG nova.virt.hardware [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 602.786731] env[63538]: DEBUG nova.virt.hardware [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 602.786966] env[63538]: DEBUG nova.virt.hardware [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 602.787172] env[63538]: DEBUG nova.virt.hardware [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 602.787342] env[63538]: DEBUG nova.virt.hardware [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 602.787515] env[63538]: DEBUG nova.virt.hardware [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 602.788500] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7367713-defd-48ea-abe9-e24af7e4f84a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.797324] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff46514-b89a-4fcc-9070-3e59032e0e21 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.855491] env[63538]: DEBUG oslo_vmware.api [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100423, 'name': PowerOnVM_Task, 'duration_secs': 0.972335} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.856077] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 602.858021] env[63538]: INFO nova.compute.manager [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Took 13.99 seconds to spawn the instance on the hypervisor. 
[ 602.858021] env[63538]: DEBUG nova.compute.manager [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 602.858021] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06992263-e09c-41a1-b426-f6043903f90a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.944112] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100426, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.994205] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100427, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106836} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.994514] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 602.995396] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1789ccba-a770-45a2-b6f8-40225cb84bf2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.020913] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 36d40b69-fae7-4867-afa1-4befdc96bde0/36d40b69-fae7-4867-afa1-4befdc96bde0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 603.021067] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64de1781-1078-4fb2-809b-c1fae6f999ab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.050396] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 603.050396] env[63538]: value = "task-5100428" [ 603.050396] env[63538]: _type = "Task" [ 603.050396] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.061335] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100428, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.102848] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Releasing lock "refresh_cache-d99b7b8e-633f-4fba-bce6-9b8e9e9892d1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.102848] env[63538]: DEBUG nova.compute.manager [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 603.102848] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 603.103735] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65af290e-3379-480b-aba3-9f640e3c5626 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.113372] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 603.114032] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf138959-d361-4d57-a880-2602ce446253 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.123969] env[63538]: DEBUG oslo_vmware.api [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Waiting for the task: (returnval){ [ 603.123969] env[63538]: value = "task-5100429" [ 603.123969] env[63538]: _type = "Task" [ 603.123969] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.135244] env[63538]: DEBUG oslo_vmware.api [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100429, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.280303] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.578s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.281388] env[63538]: DEBUG nova.compute.manager [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 603.285230] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.315s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.285996] env[63538]: DEBUG nova.objects.instance [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Lazy-loading 'resources' on Instance uuid 15a8424e-27a6-4b77-b57c-d163345b8fed {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 603.369064] env[63538]: DEBUG nova.network.neutron [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Updated VIF entry in instance network info cache for port ac528a58-339f-4621-890a-afe5e3ee634d. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 603.369433] env[63538]: DEBUG nova.network.neutron [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Updating instance_info_cache with network_info: [{"id": "ac528a58-339f-4621-890a-afe5e3ee634d", "address": "fa:16:3e:ec:27:93", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac528a58-33", "ovs_interfaceid": "ac528a58-339f-4621-890a-afe5e3ee634d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.381752] env[63538]: INFO nova.compute.manager [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Took 21.64 seconds to build instance. [ 603.445893] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100426, 'name': ReconfigVM_Task, 'duration_secs': 0.958416} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.445893] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf/10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 603.445893] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a3754ca-a0bf-4970-9109-d81ee7c74280 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.452309] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Waiting for the task: (returnval){ [ 603.452309] env[63538]: value = "task-5100430" [ 603.452309] env[63538]: _type = "Task" [ 603.452309] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.462684] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100430, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.562481] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100428, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.614703] env[63538]: DEBUG nova.network.neutron [-] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.641088] env[63538]: DEBUG oslo_vmware.api [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100429, 'name': PowerOffVM_Task, 'duration_secs': 0.212824} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.641088] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 603.641449] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 603.641549] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0a38189-f87b-4139-9787-fe37e2f5f96a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.675087] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 603.675087] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 603.675087] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Deleting the datastore file [datastore1] 
d99b7b8e-633f-4fba-bce6-9b8e9e9892d1 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 603.675087] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8bb74ddd-9997-492b-800e-ee19c4d9046e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.685633] env[63538]: DEBUG oslo_vmware.api [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Waiting for the task: (returnval){ [ 603.685633] env[63538]: value = "task-5100432" [ 603.685633] env[63538]: _type = "Task" [ 603.685633] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.701406] env[63538]: DEBUG oslo_vmware.api [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100432, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.702070] env[63538]: DEBUG nova.compute.manager [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Stashing vm_state: active {{(pid=63538) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 603.794124] env[63538]: DEBUG nova.compute.utils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.795759] env[63538]: DEBUG nova.compute.manager [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 603.796147] env[63538]: DEBUG nova.network.neutron [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 603.867695] env[63538]: DEBUG nova.network.neutron [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Successfully updated port: 33b2bb84-c893-4ee8-90de-6696ef21d830 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 603.875825] env[63538]: DEBUG oslo_concurrency.lockutils [req-a38c472a-9f1b-460d-89e4-900b41ae549b req-6652f97c-65a1-4c0c-84be-1f4b6e737b84 service nova] Releasing lock "refresh_cache-36d40b69-fae7-4867-afa1-4befdc96bde0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.882877] env[63538]: DEBUG nova.policy [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a5b18ded69f40bab03d546142bc4517', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6480e3bc216427d939223b9e3b6a21b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 603.884643] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b9b90b69-0813-47c2-8b80-6761383f3644 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.212s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.971126] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100430, 'name': Rename_Task, 'duration_secs': 0.394485} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.979688] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 603.979688] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27297733-cd30-4213-8d85-2a67297652ef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.991254] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Waiting for the task: (returnval){ [ 603.991254] env[63538]: value = "task-5100433" [ 603.991254] env[63538]: _type = "Task" [ 603.991254] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.021223] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100433, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.070270] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100428, 'name': ReconfigVM_Task, 'duration_secs': 0.831285} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.070418] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 36d40b69-fae7-4867-afa1-4befdc96bde0/36d40b69-fae7-4867-afa1-4befdc96bde0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 604.071133] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e8b7b24-202c-48a5-b063-0b5107a9b002 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.083590] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 604.083590] env[63538]: value = "task-5100435" [ 604.083590] env[63538]: _type = "Task" [ 604.083590] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.109350] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100435, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.117601] env[63538]: INFO nova.compute.manager [-] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Took 1.38 seconds to deallocate network for instance. [ 604.204032] env[63538]: DEBUG oslo_vmware.api [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Task: {'id': task-5100432, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248357} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.204337] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 604.204535] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 604.204709] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 604.204905] env[63538]: INFO nova.compute.manager [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Took 1.10 seconds to destroy the instance on the hypervisor. [ 604.205297] env[63538]: DEBUG oslo.service.loopingcall [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 604.205503] env[63538]: DEBUG nova.compute.manager [-] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 604.205598] env[63538]: DEBUG nova.network.neutron [-] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 604.235845] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.246837] env[63538]: DEBUG nova.network.neutron [-] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 604.298449] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76869a13-26fa-4c14-b81f-b89f065e2123 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.303589] env[63538]: DEBUG nova.compute.manager [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 604.312916] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09a4be9-19dc-4f55-86b8-0dae987e3b33 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.353469] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb43bf63-0516-4829-ad0f-238b65f000fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.362288] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e92418-3a8b-497f-9daa-d46b748e319c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.385435] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Acquiring lock "refresh_cache-bf54098e-91a8-403f-a6fe-b58a62daaadb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.385643] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Acquired lock "refresh_cache-bf54098e-91a8-403f-a6fe-b58a62daaadb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.385868] env[63538]: DEBUG nova.network.neutron [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 
tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 604.391025] env[63538]: DEBUG nova.compute.provider_tree [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.502551] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100433, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.512308] env[63538]: DEBUG nova.network.neutron [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Successfully created port: 514b9391-5894-4419-800a-e06658f8a44b {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.601404] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100435, 'name': Rename_Task, 'duration_secs': 0.262634} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.601781] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 604.603255] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f730722-6928-41d5-b816-ace31227dc20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.614401] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 604.614401] env[63538]: value = "task-5100436" [ 604.614401] env[63538]: _type = "Task" [ 604.614401] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.626912] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100436, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.630372] env[63538]: DEBUG oslo_concurrency.lockutils [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.688036] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Acquiring lock "e3ba860b-afb8-4843-9d99-049dce205f9f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.689308] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Lock "e3ba860b-afb8-4843-9d99-049dce205f9f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.689308] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Acquiring lock "e3ba860b-afb8-4843-9d99-049dce205f9f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.689308] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Lock "e3ba860b-afb8-4843-9d99-049dce205f9f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.689308] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Lock "e3ba860b-afb8-4843-9d99-049dce205f9f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.691978] env[63538]: INFO nova.compute.manager [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Terminating instance [ 604.694555] env[63538]: DEBUG nova.compute.manager [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 604.695080] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 604.695776] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad16b9b-fff7-437e-8a35-796a00f90c1a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.704940] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 604.706262] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d69af9e-7835-4685-a774-0e230134e674 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.714687] env[63538]: DEBUG oslo_vmware.api [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Waiting for the task: (returnval){ [ 604.714687] env[63538]: value = "task-5100437" [ 604.714687] env[63538]: _type = "Task" [ 604.714687] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.727979] env[63538]: DEBUG oslo_vmware.api [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100437, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.750261] env[63538]: DEBUG nova.network.neutron [-] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.891809] env[63538]: DEBUG nova.scheduler.client.report [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 604.988398] env[63538]: DEBUG nova.network.neutron [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 605.005984] env[63538]: DEBUG oslo_vmware.api [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100433, 'name': PowerOnVM_Task, 'duration_secs': 0.837574} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.006114] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 605.006578] env[63538]: INFO nova.compute.manager [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Took 12.37 seconds to spawn the instance on the hypervisor. [ 605.006657] env[63538]: DEBUG nova.compute.manager [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 605.007801] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27fc234b-23a8-417e-8dda-651ebc8ec255 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.126907] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100436, 'name': PowerOnVM_Task} progress is 87%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.231906] env[63538]: DEBUG oslo_vmware.api [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100437, 'name': PowerOffVM_Task, 'duration_secs': 0.320997} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.235172] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 605.235172] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 605.235417] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef982306-846a-4263-aa59-cfc75b5132b8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.253158] env[63538]: INFO nova.compute.manager [-] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Took 1.05 seconds to deallocate network for instance. [ 605.290902] env[63538]: DEBUG nova.network.neutron [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Updating instance_info_cache with network_info: [{"id": "33b2bb84-c893-4ee8-90de-6696ef21d830", "address": "fa:16:3e:8e:14:ec", "network": {"id": "2af5d5f0-f120-4850-bd33-40951b8bbe31", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1240280342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22554cf5d2194573ba27c6236d2c3ad2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33b2bb84-c8", "ovs_interfaceid": "33b2bb84-c893-4ee8-90de-6696ef21d830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.315975] env[63538]: DEBUG nova.compute.manager [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 605.321802] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 605.322138] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 605.322273] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Deleting the datastore file [datastore2] e3ba860b-afb8-4843-9d99-049dce205f9f {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 605.324602] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9104ac17-40b0-449b-b2a6-20ba4771b405 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.336379] env[63538]: DEBUG oslo_vmware.api [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Waiting for the task: (returnval){ [ 605.336379] env[63538]: value = "task-5100439" [ 605.336379] env[63538]: _type = "Task" [ 605.336379] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.353142] env[63538]: DEBUG oslo_vmware.api [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100439, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.357450] env[63538]: DEBUG nova.virt.hardware [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:50:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='717895813',id=30,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-945516049',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 605.357717] env[63538]: DEBUG nova.virt.hardware [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 605.357877] env[63538]: DEBUG nova.virt.hardware [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.358151] env[63538]: DEBUG nova.virt.hardware [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 605.358404] env[63538]: DEBUG nova.virt.hardware [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 605.358453] env[63538]: DEBUG nova.virt.hardware [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 605.358647] env[63538]: DEBUG nova.virt.hardware [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 605.358814] env[63538]: DEBUG nova.virt.hardware [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 
tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 605.358983] env[63538]: DEBUG nova.virt.hardware [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 605.359144] env[63538]: DEBUG nova.virt.hardware [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 605.359326] env[63538]: DEBUG nova.virt.hardware [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 605.360215] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715ac6d0-32aa-46c1-928d-7c0328eabcca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.370361] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ca95d0-e28f-4b42-8df2-6d0c8d3983e3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.402148] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.116s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.406365] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.997s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.407366] env[63538]: DEBUG nova.objects.instance [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Lazy-loading 'resources' on Instance uuid 4b8fb9ad-a366-423d-81b1-04c5e4ec9264 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 605.450087] env[63538]: INFO nova.scheduler.client.report [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Deleted allocations for instance 15a8424e-27a6-4b77-b57c-d163345b8fed [ 605.536950] env[63538]: INFO nova.compute.manager [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 
tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Took 23.14 seconds to build instance. [ 605.631135] env[63538]: DEBUG oslo_vmware.api [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100436, 'name': PowerOnVM_Task, 'duration_secs': 0.934925} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.631135] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 605.631135] env[63538]: INFO nova.compute.manager [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Took 10.42 seconds to spawn the instance on the hypervisor. [ 605.631135] env[63538]: DEBUG nova.compute.manager [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 605.632021] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8992b6dd-c510-4188-84d9-1048a4eacec5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.762602] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.799021] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Releasing lock "refresh_cache-bf54098e-91a8-403f-a6fe-b58a62daaadb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.799736] env[63538]: DEBUG nova.compute.manager [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Instance network_info: |[{"id": "33b2bb84-c893-4ee8-90de-6696ef21d830", "address": "fa:16:3e:8e:14:ec", "network": {"id": "2af5d5f0-f120-4850-bd33-40951b8bbe31", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1240280342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "22554cf5d2194573ba27c6236d2c3ad2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33b2bb84-c8", "ovs_interfaceid": "33b2bb84-c893-4ee8-90de-6696ef21d830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 605.800500] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:14:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8c58d99d-ec12-4fc3-ab39-042b3f8cbb89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33b2bb84-c893-4ee8-90de-6696ef21d830', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 605.814492] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Creating folder: Project (22554cf5d2194573ba27c6236d2c3ad2). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 605.815040] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b4a58d4-09ec-4706-ad75-645cbdbc819d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.829905] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Created folder: Project (22554cf5d2194573ba27c6236d2c3ad2) in parent group-v992234. [ 605.829905] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Creating folder: Instances. Parent ref: group-v992275. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 605.830076] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-883d2a9f-8cee-4694-888e-92d2a408f8b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.845678] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Created folder: Instances in parent group-v992275. [ 605.846027] env[63538]: DEBUG oslo.service.loopingcall [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.851541] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 605.851936] env[63538]: DEBUG oslo_vmware.api [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Task: {'id': task-5100439, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303545} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.852212] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a2198638-c2c6-41d1-b6a1-e1002de0e1d0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.874743] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 605.874958] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 605.875088] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 605.875284] env[63538]: INFO nova.compute.manager [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Took 1.18 seconds to destroy the instance on the hypervisor. [ 605.875536] env[63538]: DEBUG oslo.service.loopingcall [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.875718] env[63538]: DEBUG nova.compute.manager [-] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 605.876896] env[63538]: DEBUG nova.network.neutron [-] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 605.883611] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 605.883611] env[63538]: value = "task-5100442" [ 605.883611] env[63538]: _type = "Task" [ 605.883611] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.893090] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100442, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.966602] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cd7e978-7633-4de5-9a7b-a133d8bd3ae4 tempest-ServerDiagnosticsNegativeTest-1134525262 tempest-ServerDiagnosticsNegativeTest-1134525262-project-member] Lock "15a8424e-27a6-4b77-b57c-d163345b8fed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.489s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.040568] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed68ef16-59b2-4ed3-8a33-09b11fbf215c tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Lock "10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.106s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.056504] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "bd222761-92aa-4f2c-a752-ead9c498ee7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.056906] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "bd222761-92aa-4f2c-a752-ead9c498ee7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.163210] env[63538]: INFO nova.compute.manager [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Took 23.68 seconds to build instance. [ 606.383389] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8fe1c1-3463-40af-8a5b-8510f9b1b030 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.403192] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1132ada2-a57d-47a8-975c-ba9fb25dd9bd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.406948] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100442, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.440978] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d28fe8e-5947-4ed7-8757-960ecff76cc8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.449891] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212a4e99-ccf2-4119-b8e1-8c0a6e0631d8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.466266] env[63538]: DEBUG nova.compute.provider_tree [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.566762] env[63538]: DEBUG nova.compute.manager [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 606.604743] env[63538]: DEBUG nova.compute.manager [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Received event network-vif-plugged-33b2bb84-c893-4ee8-90de-6696ef21d830 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 606.605042] env[63538]: DEBUG oslo_concurrency.lockutils [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] Acquiring lock "bf54098e-91a8-403f-a6fe-b58a62daaadb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.606061] env[63538]: DEBUG oslo_concurrency.lockutils [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.606061] env[63538]: DEBUG oslo_concurrency.lockutils [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.606061] env[63538]: DEBUG nova.compute.manager [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] No waiting events found dispatching network-vif-plugged-33b2bb84-c893-4ee8-90de-6696ef21d830 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 606.606259] env[63538]: WARNING nova.compute.manager [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Received unexpected event 
network-vif-plugged-33b2bb84-c893-4ee8-90de-6696ef21d830 for instance with vm_state building and task_state spawning. [ 606.607300] env[63538]: DEBUG nova.compute.manager [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Received event network-changed-33b2bb84-c893-4ee8-90de-6696ef21d830 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 606.607552] env[63538]: DEBUG nova.compute.manager [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Refreshing instance network info cache due to event network-changed-33b2bb84-c893-4ee8-90de-6696ef21d830. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 606.610272] env[63538]: DEBUG oslo_concurrency.lockutils [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] Acquiring lock "refresh_cache-bf54098e-91a8-403f-a6fe-b58a62daaadb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.610272] env[63538]: DEBUG oslo_concurrency.lockutils [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] Acquired lock "refresh_cache-bf54098e-91a8-403f-a6fe-b58a62daaadb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.610272] env[63538]: DEBUG nova.network.neutron [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Refreshing network info cache for port 33b2bb84-c893-4ee8-90de-6696ef21d830 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 606.617937] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "c065263a-fd40-4b44-a68e-0e03248d0bc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.618073] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "c065263a-fd40-4b44-a68e-0e03248d0bc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.666441] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0fc0bb68-227a-4131-b5ec-0dd263f08656 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "36d40b69-fae7-4867-afa1-4befdc96bde0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.610s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.857879] env[63538]: DEBUG nova.compute.manager [req-fa85abff-0194-4af9-b60f-702e523fd11f req-7f209bbc-06bb-43f1-93c8-8ecf8f0fa433 service nova] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Received event 
network-vif-deleted-5e9b18aa-a332-400c-9c74-aed76633d8b5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 606.902694] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100442, 'name': CreateVM_Task, 'duration_secs': 0.5312} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.902913] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 606.903757] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.903920] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.904306] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 606.904582] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1b72e95-f856-4fd9-a7bb-0507315cdab9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.911400] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Waiting for the task: (returnval){ [ 606.911400] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c9ecd8-e313-331d-c4a7-be80de3ffc2e" [ 606.911400] env[63538]: _type = "Task" [ 606.911400] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.925190] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c9ecd8-e313-331d-c4a7-be80de3ffc2e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.969768] env[63538]: DEBUG nova.scheduler.client.report [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 607.097284] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.123229] env[63538]: DEBUG nova.compute.manager [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 607.203616] env[63538]: DEBUG nova.network.neutron [-] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.342936] env[63538]: DEBUG nova.network.neutron [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Successfully updated port: 514b9391-5894-4419-800a-e06658f8a44b {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 607.426556] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c9ecd8-e313-331d-c4a7-be80de3ffc2e, 'name': SearchDatastore_Task, 'duration_secs': 0.035682} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.426820] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.427152] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 607.427256] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.427337] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.427515] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 607.427941] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a78e1f39-ae90-471e-a719-e9a7281872f0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.445490] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 607.445692] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 607.446737] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74d8e413-c229-4ed2-80d8-c06f0e967f40 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.456271] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Waiting for the task: (returnval){ [ 607.456271] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52701bce-3dca-6376-74d8-4cb129e42cfb" [ 607.456271] env[63538]: _type = "Task" [ 607.456271] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.467266] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52701bce-3dca-6376-74d8-4cb129e42cfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.475326] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.069s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.481450] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.084s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.485391] env[63538]: INFO nova.compute.claims [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.526306] env[63538]: INFO nova.scheduler.client.report [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Deleted allocations for instance 4b8fb9ad-a366-423d-81b1-04c5e4ec9264 [ 607.658707] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.662642] env[63538]: DEBUG nova.network.neutron [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Updated VIF entry in instance network info cache for port 
33b2bb84-c893-4ee8-90de-6696ef21d830. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 607.662642] env[63538]: DEBUG nova.network.neutron [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Updating instance_info_cache with network_info: [{"id": "33b2bb84-c893-4ee8-90de-6696ef21d830", "address": "fa:16:3e:8e:14:ec", "network": {"id": "2af5d5f0-f120-4850-bd33-40951b8bbe31", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1240280342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22554cf5d2194573ba27c6236d2c3ad2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33b2bb84-c8", "ovs_interfaceid": "33b2bb84-c893-4ee8-90de-6696ef21d830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.710742] env[63538]: INFO nova.compute.manager [-] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Took 1.83 seconds to deallocate network for instance. 
[ 607.845844] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "refresh_cache-1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.847888] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquired lock "refresh_cache-1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.847888] env[63538]: DEBUG nova.network.neutron [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 607.947057] env[63538]: DEBUG nova.compute.manager [req-b4cc3d25-37de-4527-99a3-a38b7cdf0d82 req-6ff5ffaf-7a83-41f9-b771-4062bf420584 service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Received event network-vif-plugged-514b9391-5894-4419-800a-e06658f8a44b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 607.947057] env[63538]: DEBUG oslo_concurrency.lockutils [req-b4cc3d25-37de-4527-99a3-a38b7cdf0d82 req-6ff5ffaf-7a83-41f9-b771-4062bf420584 service nova] Acquiring lock "1e33b68e-8509-4ec4-8ec4-dc758aae9a5a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.947057] env[63538]: DEBUG oslo_concurrency.lockutils [req-b4cc3d25-37de-4527-99a3-a38b7cdf0d82 req-6ff5ffaf-7a83-41f9-b771-4062bf420584 service nova] Lock "1e33b68e-8509-4ec4-8ec4-dc758aae9a5a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.947057] env[63538]: DEBUG oslo_concurrency.lockutils [req-b4cc3d25-37de-4527-99a3-a38b7cdf0d82 req-6ff5ffaf-7a83-41f9-b771-4062bf420584 service nova] Lock "1e33b68e-8509-4ec4-8ec4-dc758aae9a5a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.947057] env[63538]: DEBUG nova.compute.manager [req-b4cc3d25-37de-4527-99a3-a38b7cdf0d82 req-6ff5ffaf-7a83-41f9-b771-4062bf420584 service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] No waiting events found dispatching network-vif-plugged-514b9391-5894-4419-800a-e06658f8a44b {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 607.947503] env[63538]: WARNING nova.compute.manager [req-b4cc3d25-37de-4527-99a3-a38b7cdf0d82 req-6ff5ffaf-7a83-41f9-b771-4062bf420584 service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Received unexpected event network-vif-plugged-514b9391-5894-4419-800a-e06658f8a44b for instance with vm_state building and task_state spawning. 
[ 607.968503] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52701bce-3dca-6376-74d8-4cb129e42cfb, 'name': SearchDatastore_Task, 'duration_secs': 0.017459} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.969508] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a49bdd2-ebd5-400f-b887-0825342dbba4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.978517] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Waiting for the task: (returnval){ [ 607.978517] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524ca606-2cfc-c625-514d-6c1da26821f7" [ 607.978517] env[63538]: _type = "Task" [ 607.978517] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.997493] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524ca606-2cfc-c625-514d-6c1da26821f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.037392] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2df4f009-417d-4434-8c06-a075e15381ed tempest-DeleteServersAdminTestJSON-109023497 tempest-DeleteServersAdminTestJSON-109023497-project-admin] Lock "4b8fb9ad-a366-423d-81b1-04c5e4ec9264" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.838s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.165218] env[63538]: DEBUG oslo_concurrency.lockutils [req-60e95de2-826a-484b-ae3d-da9373ffa1f5 req-86f0f4ed-a7a6-4824-a1bc-bce10c6f3de5 service nova] Releasing lock "refresh_cache-bf54098e-91a8-403f-a6fe-b58a62daaadb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.216954] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.400924] env[63538]: DEBUG nova.network.neutron [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 608.492570] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524ca606-2cfc-c625-514d-6c1da26821f7, 'name': SearchDatastore_Task, 'duration_secs': 0.014685} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.493239] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.494575] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] bf54098e-91a8-403f-a6fe-b58a62daaadb/bf54098e-91a8-403f-a6fe-b58a62daaadb.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 608.494908] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9431514-f6c8-4ac0-b496-f360b081a457 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.513749] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Waiting for the task: (returnval){ [ 608.513749] env[63538]: value = "task-5100445" [ 608.513749] env[63538]: _type = "Task" [ 608.513749] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.526648] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100445, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.717548] env[63538]: DEBUG nova.network.neutron [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Updating instance_info_cache with network_info: [{"id": "514b9391-5894-4419-800a-e06658f8a44b", "address": "fa:16:3e:c5:61:96", "network": {"id": "c0e827ce-4abc-4178-9811-36625c1d6ebc", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1894083828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6480e3bc216427d939223b9e3b6a21b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69351262-8d39-441a-85ba-3a78df436d17", "external-id": "nsx-vlan-transportzone-205", "segmentation_id": 205, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap514b9391-58", "ovs_interfaceid": "514b9391-5894-4419-800a-e06658f8a44b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.959044] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4d1786-e45f-47dd-a712-dbef8f8ffd8e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.969776] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58a3261-083d-4cdd-ba50-ca0cf35a3bc2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.011056] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cac9d7e-5e66-4037-8732-f69f993b3768 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.029458] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59cbc6e7-3624-4e31-8467-08a4fc3269e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.034485] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100445, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.047529] env[63538]: DEBUG nova.compute.provider_tree [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.222165] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Releasing lock "refresh_cache-1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.222988] env[63538]: DEBUG nova.compute.manager [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Instance network_info: |[{"id": "514b9391-5894-4419-800a-e06658f8a44b", "address": "fa:16:3e:c5:61:96", "network": {"id": "c0e827ce-4abc-4178-9811-36625c1d6ebc", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1894083828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6480e3bc216427d939223b9e3b6a21b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69351262-8d39-441a-85ba-3a78df436d17", "external-id": "nsx-vlan-transportzone-205", "segmentation_id": 205, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap514b9391-58", "ovs_interfaceid": "514b9391-5894-4419-800a-e06658f8a44b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 609.223137] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:61:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69351262-8d39-441a-85ba-3a78df436d17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '514b9391-5894-4419-800a-e06658f8a44b', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 609.230647] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Creating folder: Project (e6480e3bc216427d939223b9e3b6a21b). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 609.230939] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85fed448-c8af-4a1b-b277-35edaf78c5eb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.243279] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Created folder: Project (e6480e3bc216427d939223b9e3b6a21b) in parent group-v992234. [ 609.243562] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Creating folder: Instances. Parent ref: group-v992279. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 609.243744] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cdee70ba-4fc8-4333-9e89-02dbcbf8208a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.254759] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Created folder: Instances in parent group-v992279. [ 609.255048] env[63538]: DEBUG oslo.service.loopingcall [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 609.257311] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 609.257311] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59b02aac-0602-449e-8a24-4984cdd724f0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.277246] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 609.277246] env[63538]: value = "task-5100448" [ 609.277246] env[63538]: _type = "Task" [ 609.277246] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.286412] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100448, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.542923] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100445, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576205} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.544596] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] bf54098e-91a8-403f-a6fe-b58a62daaadb/bf54098e-91a8-403f-a6fe-b58a62daaadb.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 609.544805] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 609.544980] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7683c272-3306-42fc-b0ed-42e1813b829f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.551055] env[63538]: DEBUG nova.scheduler.client.report [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 609.558146] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Waiting for the task: (returnval){ [ 609.558146] env[63538]: value = "task-5100449" [ 609.558146] env[63538]: _type = "Task" [ 609.558146] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.576360] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100449, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.610494] env[63538]: DEBUG nova.compute.manager [req-60cd2691-e3ad-43a2-979a-069715abad87 req-fb4b19db-1333-4bc4-8cb4-e19483854165 service nova] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Received event network-vif-deleted-2aa9f276-7b52-46d9-9394-e3be1142ea88 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 609.611093] env[63538]: DEBUG nova.compute.manager [req-60cd2691-e3ad-43a2-979a-069715abad87 req-fb4b19db-1333-4bc4-8cb4-e19483854165 service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Received event network-changed-514b9391-5894-4419-800a-e06658f8a44b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 609.611502] env[63538]: DEBUG nova.compute.manager [req-60cd2691-e3ad-43a2-979a-069715abad87 req-fb4b19db-1333-4bc4-8cb4-e19483854165 service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Refreshing instance network info cache due to event network-changed-514b9391-5894-4419-800a-e06658f8a44b. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 609.614576] env[63538]: DEBUG oslo_concurrency.lockutils [req-60cd2691-e3ad-43a2-979a-069715abad87 req-fb4b19db-1333-4bc4-8cb4-e19483854165 service nova] Acquiring lock "refresh_cache-1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.614576] env[63538]: DEBUG oslo_concurrency.lockutils [req-60cd2691-e3ad-43a2-979a-069715abad87 req-fb4b19db-1333-4bc4-8cb4-e19483854165 service nova] Acquired lock "refresh_cache-1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.614576] env[63538]: DEBUG nova.network.neutron [req-60cd2691-e3ad-43a2-979a-069715abad87 req-fb4b19db-1333-4bc4-8cb4-e19483854165 service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Refreshing network info cache for port 514b9391-5894-4419-800a-e06658f8a44b {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 609.788697] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100448, 'name': CreateVM_Task, 'duration_secs': 0.509573} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.788931] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 609.789776] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.790137] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.790483] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 609.790737] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f1b26fa-5bf9-4307-858d-8e6f0d1788f8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.795908] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 609.795908] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52160dc2-7808-b5cb-67b2-4e0eeb2fe0f9" [ 609.795908] env[63538]: _type = "Task" [ 609.795908] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.804796] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52160dc2-7808-b5cb-67b2-4e0eeb2fe0f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.871668] env[63538]: DEBUG nova.compute.manager [req-ac05dc44-8c52-459a-93bb-525e9c6c04ec req-77ec0623-f437-4195-8e2c-eb3d6e3be98f service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Received event network-changed-51784c09-2e46-4add-9f20-a0a9563f7eaf {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 609.871859] env[63538]: DEBUG nova.compute.manager [req-ac05dc44-8c52-459a-93bb-525e9c6c04ec req-77ec0623-f437-4195-8e2c-eb3d6e3be98f service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Refreshing instance network info cache due to event network-changed-51784c09-2e46-4add-9f20-a0a9563f7eaf. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 609.872204] env[63538]: DEBUG oslo_concurrency.lockutils [req-ac05dc44-8c52-459a-93bb-525e9c6c04ec req-77ec0623-f437-4195-8e2c-eb3d6e3be98f service nova] Acquiring lock "refresh_cache-174368d1-9910-495b-a923-842e0440fd01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.873051] env[63538]: DEBUG oslo_concurrency.lockutils [req-ac05dc44-8c52-459a-93bb-525e9c6c04ec req-77ec0623-f437-4195-8e2c-eb3d6e3be98f service nova] Acquired lock "refresh_cache-174368d1-9910-495b-a923-842e0440fd01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.873207] env[63538]: DEBUG nova.network.neutron [req-ac05dc44-8c52-459a-93bb-525e9c6c04ec req-77ec0623-f437-4195-8e2c-eb3d6e3be98f service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Refreshing network info cache for port 51784c09-2e46-4add-9f20-a0a9563f7eaf {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 610.068629] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.591s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.069380] env[63538]: DEBUG nova.compute.manager [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 610.075378] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.503s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.076771] env[63538]: INFO nova.compute.claims [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 610.088579] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100449, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075942} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.090196] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 610.090196] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78aea3f-f056-4e5a-9f77-5c32f1a20b52 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.117950] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] bf54098e-91a8-403f-a6fe-b58a62daaadb/bf54098e-91a8-403f-a6fe-b58a62daaadb.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 610.121014] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3853edf5-2f80-4e53-9f08-f7c0a8b1cf06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.150218] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Waiting for the task: (returnval){ [ 610.150218] env[63538]: value = "task-5100450" [ 610.150218] env[63538]: _type = "Task" [ 610.150218] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.164767] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100450, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.307879] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52160dc2-7808-b5cb-67b2-4e0eeb2fe0f9, 'name': SearchDatastore_Task, 'duration_secs': 0.065381} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.309472] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.309472] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 610.309472] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.309472] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.309698] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 610.309698] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0458f9b9-a316-45be-9363-b377dae4c488 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.323358] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 610.323606] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 610.324307] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a1e2b8e-ae96-4f6e-bd57-d7195689cc29 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.334582] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 610.334582] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a2b723-1c95-6c77-4f3f-b4df30d9fe11" [ 610.334582] env[63538]: _type = "Task" [ 610.334582] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.346884] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a2b723-1c95-6c77-4f3f-b4df30d9fe11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.578098] env[63538]: DEBUG nova.compute.utils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 610.579730] env[63538]: DEBUG nova.compute.manager [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 610.579730] env[63538]: DEBUG nova.network.neutron [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 610.590255] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Acquiring lock "10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.590736] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Lock "10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.590736] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Acquiring lock "10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.590898] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Lock "10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.590963] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Lock "10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.594653] env[63538]: INFO nova.compute.manager [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Terminating instance [ 610.597782] env[63538]: DEBUG nova.network.neutron [req-60cd2691-e3ad-43a2-979a-069715abad87 req-fb4b19db-1333-4bc4-8cb4-e19483854165 service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Updated VIF entry in instance network info cache for port 514b9391-5894-4419-800a-e06658f8a44b. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 610.598335] env[63538]: DEBUG nova.network.neutron [req-60cd2691-e3ad-43a2-979a-069715abad87 req-fb4b19db-1333-4bc4-8cb4-e19483854165 service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Updating instance_info_cache with network_info: [{"id": "514b9391-5894-4419-800a-e06658f8a44b", "address": "fa:16:3e:c5:61:96", "network": {"id": "c0e827ce-4abc-4178-9811-36625c1d6ebc", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1894083828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6480e3bc216427d939223b9e3b6a21b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69351262-8d39-441a-85ba-3a78df436d17", "external-id": "nsx-vlan-transportzone-205", "segmentation_id": 205, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap514b9391-58", "ovs_interfaceid": "514b9391-5894-4419-800a-e06658f8a44b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.601690] env[63538]: DEBUG nova.compute.manager [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 610.601690] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 610.602679] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8019535-8c66-4802-8d7a-f664862a1763 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.616313] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 610.620854] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56cc0b52-414d-4a80-a9d2-6d0265f585bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.629372] env[63538]: DEBUG oslo_vmware.api [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Waiting for the task: (returnval){ [ 610.629372] env[63538]: value = "task-5100452" [ 610.629372] env[63538]: _type = "Task" [ 610.629372] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.641732] env[63538]: DEBUG oslo_vmware.api [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100452, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.670644] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100450, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.708396] env[63538]: DEBUG nova.policy [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18cd45786faa4ef18fa564f2e6d3b4bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '540ca40795e54cfb8e38e203bba99ba0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 610.810871] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "e32789d5-59ba-4657-9a9c-84fc9bd6cfdf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.810979] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "e32789d5-59ba-4657-9a9c-84fc9bd6cfdf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.839649] env[63538]: DEBUG nova.network.neutron [req-ac05dc44-8c52-459a-93bb-525e9c6c04ec req-77ec0623-f437-4195-8e2c-eb3d6e3be98f service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Updated VIF entry in instance network info cache for port 51784c09-2e46-4add-9f20-a0a9563f7eaf. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 610.840023] env[63538]: DEBUG nova.network.neutron [req-ac05dc44-8c52-459a-93bb-525e9c6c04ec req-77ec0623-f437-4195-8e2c-eb3d6e3be98f service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Updating instance_info_cache with network_info: [{"id": "51784c09-2e46-4add-9f20-a0a9563f7eaf", "address": "fa:16:3e:df:3b:0a", "network": {"id": "1a29c626-75f0-4d3c-b7cf-2266e6dfe02b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-706845743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2488493adc8b48d29e615ebcb8a5935e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca401eaa-889a-4f9f-ac9a-56b4c41bfc06", "external-id": "nsx-vlan-transportzone-877", "segmentation_id": 877, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51784c09-2e", "ovs_interfaceid": "51784c09-2e46-4add-9f20-a0a9563f7eaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.850970] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a2b723-1c95-6c77-4f3f-b4df30d9fe11, 'name': SearchDatastore_Task, 'duration_secs': 0.020875} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.852391] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5e1d13d-dde7-45b7-aa3b-197a2419e855 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.861835] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 610.861835] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52655fd1-119d-878d-3c43-a2babcb29d3b" [ 610.861835] env[63538]: _type = "Task" [ 610.861835] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.872912] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52655fd1-119d-878d-3c43-a2babcb29d3b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.087737] env[63538]: DEBUG nova.compute.manager [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 611.099234] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquiring lock "a6bb8713-6b00-4a43-96b7-a84ee39d790d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.100142] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "a6bb8713-6b00-4a43-96b7-a84ee39d790d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.110622] env[63538]: DEBUG oslo_concurrency.lockutils [req-60cd2691-e3ad-43a2-979a-069715abad87 req-fb4b19db-1333-4bc4-8cb4-e19483854165 service nova] Releasing lock "refresh_cache-1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.148295] env[63538]: DEBUG oslo_vmware.api [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100452, 'name': PowerOffVM_Task, 'duration_secs': 0.425024} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.149499] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 611.149711] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 611.149996] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-978b9d17-47a0-4e20-be2b-49db13fb9434 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.169726] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100450, 'name': ReconfigVM_Task, 'duration_secs': 0.771073} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.173369] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Reconfigured VM instance instance-0000000e to attach disk [datastore1] bf54098e-91a8-403f-a6fe-b58a62daaadb/bf54098e-91a8-403f-a6fe-b58a62daaadb.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 611.174148] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-782724ad-166d-483d-b1ed-b2650341fe3f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.183131] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Waiting for the task: (returnval){ [ 611.183131] env[63538]: value = "task-5100454" [ 611.183131] env[63538]: _type = "Task" [ 611.183131] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.199204] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100454, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.233115] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 611.233175] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 611.233365] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Deleting the datastore file [datastore1] 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 611.233667] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7742408-4985-4fe7-a4f4-bfd99c1b4ce6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.243421] env[63538]: DEBUG oslo_vmware.api [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Waiting for the task: (returnval){ [ 611.243421] env[63538]: value = "task-5100455" [ 611.243421] env[63538]: _type = "Task" [ 611.243421] env[63538]: } to 
complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.257822] env[63538]: DEBUG oslo_vmware.api [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100455, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.318479] env[63538]: DEBUG nova.compute.manager [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 611.343877] env[63538]: DEBUG oslo_concurrency.lockutils [req-ac05dc44-8c52-459a-93bb-525e9c6c04ec req-77ec0623-f437-4195-8e2c-eb3d6e3be98f service nova] Releasing lock "refresh_cache-174368d1-9910-495b-a923-842e0440fd01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.374996] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52655fd1-119d-878d-3c43-a2babcb29d3b, 'name': SearchDatastore_Task, 'duration_secs': 0.023426} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.378542] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.378846] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a/1e33b68e-8509-4ec4-8ec4-dc758aae9a5a.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 611.379459] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39317d58-099a-4b82-8d3b-79d58c24ece2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.387543] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 611.387543] env[63538]: value = "task-5100456" [ 611.387543] env[63538]: _type = "Task" [ 611.387543] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.402632] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100456, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.469655] env[63538]: DEBUG nova.network.neutron [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Successfully created port: 4fd02a9a-ba01-4841-a942-ca1b96503c0e {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 611.526321] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29a97d3-5115-474c-b1c1-9ebc15d6d76f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.541219] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45c20eb-d12c-422b-9ac4-520c00d81443 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.582723] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c148e1f8-87fd-43e4-a4ab-b6d8d061fec3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.592439] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8663fbe-580f-4510-b732-1c20d4aa6df2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.621931] env[63538]: DEBUG nova.compute.provider_tree [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.699272] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100454, 'name': Rename_Task, 'duration_secs': 0.259309} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.699686] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 611.699958] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5a556da-a477-4a88-aa04-f900eef83a2c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.709035] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Waiting for the task: (returnval){ [ 611.709035] env[63538]: value = "task-5100457" [ 611.709035] env[63538]: _type = "Task" [ 611.709035] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.721359] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100457, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.755082] env[63538]: DEBUG oslo_vmware.api [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Task: {'id': task-5100455, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.246552} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.755376] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 611.755566] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 611.755780] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 611.755987] env[63538]: INFO nova.compute.manager [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 611.756316] env[63538]: DEBUG oslo.service.loopingcall [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 611.756499] env[63538]: DEBUG nova.compute.manager [-] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 611.756601] env[63538]: DEBUG nova.network.neutron [-] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 611.850371] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.899392] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100456, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.106842] env[63538]: DEBUG nova.compute.manager [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 612.125607] env[63538]: DEBUG nova.scheduler.client.report [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 612.150163] env[63538]: DEBUG nova.virt.hardware [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 612.150163] env[63538]: DEBUG nova.virt.hardware [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 612.150163] env[63538]: DEBUG nova.virt.hardware [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.150489] env[63538]: DEBUG nova.virt.hardware [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 612.150489] env[63538]: DEBUG nova.virt.hardware [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.150489] env[63538]: DEBUG nova.virt.hardware [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 612.150489] env[63538]: DEBUG nova.virt.hardware [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 612.150489] env[63538]: DEBUG nova.virt.hardware [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 612.150644] env[63538]: DEBUG nova.virt.hardware [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 612.150644] env[63538]: DEBUG nova.virt.hardware [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 612.150644] env[63538]: DEBUG nova.virt.hardware [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 612.152054] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34ea799-b207-40b2-9db3-a14db737271c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.164870] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9542d324-76e9-4fba-8d38-c8b66450deee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.235321] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100457, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.405027] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100456, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513846} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.405027] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a/1e33b68e-8509-4ec4-8ec4-dc758aae9a5a.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 612.405027] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 612.405027] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8da5db72-4351-4a5d-add0-3420591bcb77 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.411949] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 612.411949] env[63538]: value = "task-5100459" [ 612.411949] env[63538]: _type = "Task" [ 612.411949] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.423834] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100459, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.597027] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Acquiring lock "174368d1-9910-495b-a923-842e0440fd01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.598241] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Lock "174368d1-9910-495b-a923-842e0440fd01" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.599292] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Acquiring lock "174368d1-9910-495b-a923-842e0440fd01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.599495] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Lock "174368d1-9910-495b-a923-842e0440fd01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.599669] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Lock "174368d1-9910-495b-a923-842e0440fd01-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.603381] env[63538]: INFO nova.compute.manager [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Terminating instance [ 612.608481] env[63538]: DEBUG nova.compute.manager [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 612.608723] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 612.609614] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4197c0-0c64-42a1-a42b-cac2578706c6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.619153] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 612.619451] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56064b98-d3c8-4bc4-9428-27d1fb1c6468 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.628904] env[63538]: DEBUG oslo_vmware.api [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 612.628904] env[63538]: value = "task-5100460" [ 612.628904] env[63538]: _type = "Task" [ 612.628904] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.634243] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.634983] env[63538]: DEBUG nova.compute.manager [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 612.643453] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.491s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.648338] env[63538]: INFO nova.compute.claims [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 612.665618] env[63538]: DEBUG oslo_vmware.api [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100460, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.730546] env[63538]: DEBUG oslo_vmware.api [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100457, 'name': PowerOnVM_Task, 'duration_secs': 0.660347} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.730976] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 612.731262] env[63538]: INFO nova.compute.manager [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Took 9.99 seconds to spawn the instance on the hypervisor. [ 612.731528] env[63538]: DEBUG nova.compute.manager [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 612.733969] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a737f8e9-1c42-4da6-bd6b-3fc8fa2db602 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.930266] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100459, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111805} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.930499] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 612.931725] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31782ceb-7220-406f-a5a8-8a8befd4b903 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.966767] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a/1e33b68e-8509-4ec4-8ec4-dc758aae9a5a.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 612.967156] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43a6ba71-18d3-4ec5-9ace-6d2464ac3b58 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.999015] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 612.999015] env[63538]: value = "task-5100461" [ 612.999015] env[63538]: _type = "Task" [ 612.999015] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.013021] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100461, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.140044] env[63538]: DEBUG oslo_vmware.api [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100460, 'name': PowerOffVM_Task, 'duration_secs': 0.435722} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.140327] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 613.141024] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 613.141024] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9052a1d-7799-40c1-8cb7-bc474920ad3a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.160023] env[63538]: DEBUG nova.compute.utils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 613.160023] env[63538]: DEBUG nova.compute.manager [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 613.160023] env[63538]: DEBUG nova.network.neutron [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 613.221082] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 613.221082] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 613.221082] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Deleting the datastore file [datastore1] 174368d1-9910-495b-a923-842e0440fd01 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 613.221082] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3940df85-615c-4add-921d-2f19d766ebb6 
{{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.230050] env[63538]: DEBUG oslo_vmware.api [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for the task: (returnval){ [ 613.230050] env[63538]: value = "task-5100463" [ 613.230050] env[63538]: _type = "Task" [ 613.230050] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.240795] env[63538]: DEBUG oslo_vmware.api [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100463, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.265889] env[63538]: INFO nova.compute.manager [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Took 28.21 seconds to build instance. [ 613.394300] env[63538]: DEBUG nova.policy [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6da2021f6d4ff9a81f18cb792f8963', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81348a2052934087a4b147aad4e7eb39', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 613.490218] env[63538]: DEBUG nova.network.neutron [-] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.513394] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100461, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.664901] env[63538]: DEBUG nova.compute.manager [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 613.751266] env[63538]: DEBUG oslo_vmware.api [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Task: {'id': task-5100463, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.342816} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.751768] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 613.752197] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 613.752944] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 613.753276] env[63538]: INFO nova.compute.manager [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] [instance: 174368d1-9910-495b-a923-842e0440fd01] Took 1.14 seconds to destroy the instance on the hypervisor. [ 613.753662] env[63538]: DEBUG oslo.service.loopingcall [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 613.757311] env[63538]: DEBUG nova.compute.manager [-] [instance: 174368d1-9910-495b-a923-842e0440fd01] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 613.757559] env[63538]: DEBUG nova.network.neutron [-] [instance: 174368d1-9910-495b-a923-842e0440fd01] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 613.767822] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ea2ebc42-8612-4955-945f-78ec81f0f0f2 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.613s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.997657] env[63538]: INFO nova.compute.manager [-] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Took 2.24 seconds to deallocate network for instance. [ 614.025270] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100461, 'name': ReconfigVM_Task, 'duration_secs': 0.632182} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.025270] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a/1e33b68e-8509-4ec4-8ec4-dc758aae9a5a.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 614.025270] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c93251b1-2d31-43ea-a0db-0056c2c38439 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.038536] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 614.038536] env[63538]: value = "task-5100464" [ 614.038536] env[63538]: _type = "Task" [ 614.038536] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.052703] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100464, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.155980] env[63538]: DEBUG nova.compute.manager [req-f256dd73-f3cc-4a80-b970-7da85a81c6f2 req-1793b276-f91a-42a5-9e84-4ab54cd9381d service nova] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Received event network-vif-deleted-589ae6e1-42c9-49d2-bc0b-2615e1b4cfb0 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 614.200670] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16530dbd-14b8-4882-ac8c-5b2874525499 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.212509] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f7d520-aedf-4c64-b2f2-0fc4a026acee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.253983] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b6973c-e689-496a-816b-d47756a27d3a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.265376] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa04e28a-897b-4712-ba02-054511118862 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.270222] env[63538]: DEBUG nova.compute.manager [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 614.287430] env[63538]: DEBUG nova.compute.provider_tree [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.510290] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.549361] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100464, 'name': Rename_Task, 'duration_secs': 0.359872} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.549641] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 614.549886] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2b9b884-2591-4dee-aab3-95f76113895b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.556814] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 614.556814] env[63538]: value = "task-5100466" [ 614.556814] env[63538]: _type = "Task" [ 614.556814] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.565457] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100466, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.683998] env[63538]: DEBUG nova.compute.manager [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 614.721529] env[63538]: DEBUG nova.virt.hardware [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 614.722322] env[63538]: DEBUG nova.virt.hardware [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 614.722322] env[63538]: DEBUG nova.virt.hardware [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 614.722451] env[63538]: DEBUG nova.virt.hardware [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 614.722512] env[63538]: DEBUG nova.virt.hardware [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 614.722702] env[63538]: DEBUG nova.virt.hardware [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 614.724259] env[63538]: DEBUG nova.virt.hardware [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 614.724259] env[63538]: DEBUG nova.virt.hardware [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 614.724259] env[63538]: DEBUG nova.virt.hardware [None 
req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 614.724259] env[63538]: DEBUG nova.virt.hardware [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 614.724259] env[63538]: DEBUG nova.virt.hardware [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 614.724757] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efb74b6-b263-498a-b78f-53be107e8771 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.734688] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96c307d-2ad1-471f-bf99-da8dd2b1d499 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.790697] env[63538]: DEBUG nova.scheduler.client.report [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 614.817969] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.856754] env[63538]: DEBUG nova.network.neutron [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Successfully updated port: 4fd02a9a-ba01-4841-a942-ca1b96503c0e {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 614.925422] env[63538]: DEBUG nova.compute.manager [req-a8935495-b894-4062-a8b3-36f00db74ad9 req-6e0d4fc0-0e2f-4108-b664-b2f9549a18d8 service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Received event network-vif-plugged-4fd02a9a-ba01-4841-a942-ca1b96503c0e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 614.925694] env[63538]: DEBUG oslo_concurrency.lockutils [req-a8935495-b894-4062-a8b3-36f00db74ad9 
req-6e0d4fc0-0e2f-4108-b664-b2f9549a18d8 service nova] Acquiring lock "102c0463-fb64-4dda-914c-b98c8e9991ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.926019] env[63538]: DEBUG oslo_concurrency.lockutils [req-a8935495-b894-4062-a8b3-36f00db74ad9 req-6e0d4fc0-0e2f-4108-b664-b2f9549a18d8 service nova] Lock "102c0463-fb64-4dda-914c-b98c8e9991ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.926191] env[63538]: DEBUG oslo_concurrency.lockutils [req-a8935495-b894-4062-a8b3-36f00db74ad9 req-6e0d4fc0-0e2f-4108-b664-b2f9549a18d8 service nova] Lock "102c0463-fb64-4dda-914c-b98c8e9991ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.926326] env[63538]: DEBUG nova.compute.manager [req-a8935495-b894-4062-a8b3-36f00db74ad9 req-6e0d4fc0-0e2f-4108-b664-b2f9549a18d8 service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] No waiting events found dispatching network-vif-plugged-4fd02a9a-ba01-4841-a942-ca1b96503c0e {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 614.926507] env[63538]: WARNING nova.compute.manager [req-a8935495-b894-4062-a8b3-36f00db74ad9 req-6e0d4fc0-0e2f-4108-b664-b2f9549a18d8 service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Received unexpected event network-vif-plugged-4fd02a9a-ba01-4841-a942-ca1b96503c0e for instance with vm_state building and task_state spawning. [ 615.075867] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100466, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.140797] env[63538]: DEBUG nova.network.neutron [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Successfully created port: 16b1a07f-5af3-4a11-967b-acc2df708c1d {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 615.305171] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.661s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.305171] env[63538]: DEBUG nova.compute.manager [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 615.308330] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.968s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.309842] env[63538]: INFO nova.compute.claims [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 615.359910] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Acquiring lock "refresh_cache-102c0463-fb64-4dda-914c-b98c8e9991ad" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.359910] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Acquired lock "refresh_cache-102c0463-fb64-4dda-914c-b98c8e9991ad" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.359910] env[63538]: DEBUG nova.network.neutron [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 615.519710] env[63538]: DEBUG nova.network.neutron [-] [instance: 174368d1-9910-495b-a923-842e0440fd01] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.570291] env[63538]: DEBUG oslo_vmware.api [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100466, 'name': PowerOnVM_Task, 'duration_secs': 0.63156} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.570291] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 615.574396] env[63538]: INFO nova.compute.manager [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Took 10.26 seconds to spawn the instance on the hypervisor. 
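The Rename_Task and PowerOnVM_Task entries above show the usual shape of a vCenter operation in this log: the driver invokes the method, gets back a Task reference, and oslo_vmware polls it (the "wait_for_task" / "_poll_task ... progress is N%" lines) until it reports "completed successfully". A minimal sketch of that polling loop follows; it is a simplified illustration, not the oslo.vmware implementation, and get_task_info() stands in for a hypothetical helper that would read the Task object's info property via the PropertyCollector calls seen throughout the log.

# Simplified illustration of the wait_for_task / _poll_task pattern above.
# get_task_info() is a hypothetical callable returning an object with
# .state ('running' | 'success' | 'error'), .progress and .error.
import time


class TaskFailed(Exception):
    pass


def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    """Poll a vCenter task reference until it succeeds or errors out."""
    while True:
        info = get_task_info(task_ref)   # one property read per poll cycle
        if info.state == 'success':
            return info                  # e.g. Rename_Task, duration_secs=0.359872
        if info.state == 'error':
            raise TaskFailed(str(info.error))
        # corresponds to the "Task: {...} progress is N%" DEBUG lines
        print("Task %s progress is %s%%" % (task_ref, info.progress))
        time.sleep(poll_interval)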
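The oslo_concurrency.lockutils entries ("Acquiring lock \"compute_resources\" ...", "acquired ... waited 15.968s", "released ... held 2.661s") are emitted by code wrapped in a named lock; with many tempest servers building in parallel, claims against the same compute node serialize on it, which is why the waited/held times climb. A rough sketch of that pattern, using only the public lockutils.synchronized decorator and a placeholder body that is not the actual ResourceTracker code:

# Rough sketch of the named-lock pattern behind the lockutils lines above.
# The decorated body is a placeholder; only the decorator usage is real API.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def instance_claim(instance):
    # Every claim/usage update for this compute node takes the same named
    # lock, so concurrent builds queue here -- the source of the
    # "waited N.NNNs" / "held N.NNNs" figures logged above.
    pass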
[ 615.574710] env[63538]: DEBUG nova.compute.manager [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 615.576115] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68ca7fe-180f-4e6d-b562-6e815d24ae3e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.818077] env[63538]: DEBUG nova.compute.utils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 615.824054] env[63538]: DEBUG nova.compute.manager [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 615.824054] env[63538]: DEBUG nova.network.neutron [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 615.958279] env[63538]: DEBUG nova.network.neutron [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 616.018785] env[63538]: DEBUG nova.policy [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '884364daa8f746fb9b32a8b33c9e2cfd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea05f3fb4676466bb2a286f5a2fefb8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 616.026526] env[63538]: INFO nova.compute.manager [-] [instance: 174368d1-9910-495b-a923-842e0440fd01] Took 2.27 seconds to deallocate network for instance. [ 616.099736] env[63538]: INFO nova.compute.manager [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Took 29.55 seconds to build instance. [ 616.328784] env[63538]: DEBUG nova.compute.manager [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 616.541788] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.603563] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2c5d1525-a696-4977-ae71-3a68567a88af tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.855s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.804210] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cacb0b-7bff-4771-97b8-5433ec9361ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.819255] env[63538]: DEBUG nova.network.neutron [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Updating instance_info_cache with network_info: [{"id": "4fd02a9a-ba01-4841-a942-ca1b96503c0e", "address": "fa:16:3e:b7:48:f2", "network": {"id": "22f4bf41-624c-46c3-8d4b-9e72e652bacb", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-829996620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540ca40795e54cfb8e38e203bba99ba0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fd02a9a-ba", "ovs_interfaceid": "4fd02a9a-ba01-4841-a942-ca1b96503c0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.825177] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d541491-eac0-4e22-bece-9aea84ee52e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.865209] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37029cb2-4a2b-46d8-9b6a-7f428febebea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.874274] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-18dade18-096c-4236-86d4-9e92a980594f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.895426] env[63538]: DEBUG nova.compute.provider_tree [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.996345] env[63538]: DEBUG nova.network.neutron [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Successfully created port: f752fb93-15ab-4803-9e58-012b22d5f121 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 617.049650] env[63538]: DEBUG nova.compute.manager [req-f61fa006-d147-48f7-971a-690f6ce4d38c req-a73b48ae-cb9a-4065-a604-55b948b0ec4c service nova] [instance: 174368d1-9910-495b-a923-842e0440fd01] Received event network-vif-deleted-51784c09-2e46-4add-9f20-a0a9563f7eaf {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 617.325127] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Releasing lock "refresh_cache-102c0463-fb64-4dda-914c-b98c8e9991ad" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.328059] env[63538]: DEBUG nova.compute.manager [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Instance network_info: |[{"id": "4fd02a9a-ba01-4841-a942-ca1b96503c0e", "address": "fa:16:3e:b7:48:f2", "network": {"id": "22f4bf41-624c-46c3-8d4b-9e72e652bacb", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-829996620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540ca40795e54cfb8e38e203bba99ba0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fd02a9a-ba", "ovs_interfaceid": "4fd02a9a-ba01-4841-a942-ca1b96503c0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 617.329342] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:b7:48:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7654928b-7afe-42e3-a18d-68ecc775cefe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4fd02a9a-ba01-4841-a942-ca1b96503c0e', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 617.341786] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Creating folder: Project (540ca40795e54cfb8e38e203bba99ba0). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 617.341786] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf6ae3f6-f81c-474b-815c-b4fa6305b7dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.356084] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Created folder: Project (540ca40795e54cfb8e38e203bba99ba0) in parent group-v992234. [ 617.356084] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Creating folder: Instances. Parent ref: group-v992282. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 617.356084] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b0f27ec-7ec8-420a-9554-2308a6e1a9e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.368186] env[63538]: DEBUG nova.compute.manager [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 617.372179] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Created folder: Instances in parent group-v992282. [ 617.372548] env[63538]: DEBUG oslo.service.loopingcall [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 617.372779] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 617.372956] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c893af62-ea26-4240-a8ea-2fbede5f9c2b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.398848] env[63538]: DEBUG nova.scheduler.client.report [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 617.407221] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 617.407221] env[63538]: value = "task-5100469" [ 617.407221] env[63538]: _type = "Task" [ 617.407221] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.427171] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100469, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.429469] env[63538]: DEBUG nova.virt.hardware [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 617.429764] env[63538]: DEBUG nova.virt.hardware [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 617.429909] env[63538]: DEBUG nova.virt.hardware [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 617.430120] env[63538]: DEBUG nova.virt.hardware [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 617.430279] env[63538]: DEBUG nova.virt.hardware [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 617.430415] env[63538]: DEBUG nova.virt.hardware [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 617.430632] env[63538]: DEBUG nova.virt.hardware [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 617.430787] env[63538]: DEBUG nova.virt.hardware [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 617.430954] env[63538]: DEBUG nova.virt.hardware [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 
tempest-ServersTestJSON-148199479-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 617.431140] env[63538]: DEBUG nova.virt.hardware [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 617.431309] env[63538]: DEBUG nova.virt.hardware [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 617.433024] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f0a4d7-6d6f-45a1-b8fb-c1d9473ef79e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.443507] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65555fe-3ae8-42be-9b85-905febe12f2c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.909028] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.600s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.909567] env[63538]: DEBUG nova.compute.manager [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 617.912811] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.145s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.912811] env[63538]: DEBUG nova.objects.instance [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Lazy-loading 'resources' on Instance uuid e1710498-0616-4862-afc0-6e452dc19882 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 617.931600] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100469, 'name': CreateVM_Task, 'duration_secs': 0.405288} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.931600] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 617.931600] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.931600] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.931600] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 617.931791] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2849fa39-c333-40c0-953f-f9a1cf93341b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.936788] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Waiting for the task: (returnval){ [ 617.936788] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526b1916-4614-df6c-477e-ac8b8f032d2a" [ 617.936788] env[63538]: _type = "Task" [ 617.936788] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.947159] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526b1916-4614-df6c-477e-ac8b8f032d2a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.157833] env[63538]: DEBUG nova.network.neutron [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Successfully updated port: 16b1a07f-5af3-4a11-967b-acc2df708c1d {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 618.419198] env[63538]: DEBUG nova.compute.utils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 618.424756] env[63538]: DEBUG nova.compute.manager [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 618.431454] env[63538]: DEBUG nova.network.neutron [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 618.459719] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526b1916-4614-df6c-477e-ac8b8f032d2a, 'name': SearchDatastore_Task, 'duration_secs': 0.011312} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.459938] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.460173] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 618.460434] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.460588] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.460757] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 618.461360] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03510970-899c-4847-a01d-f7e42c876423 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.476548] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 618.476732] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 618.477526] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d26ca55d-0b9c-4472-88e7-8a8b57f886fb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.492718] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Waiting for the task: (returnval){ [ 618.492718] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528af165-f1fc-e5a1-3f4a-ba86d78c1a90" [ 618.492718] env[63538]: _type = "Task" [ 618.492718] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.509856] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528af165-f1fc-e5a1-3f4a-ba86d78c1a90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.524318] env[63538]: DEBUG nova.policy [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '311a3712ead145899a7768b5297f056a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5784127fe9d4eefaa1f55f0eacdb91d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 618.667416] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Acquiring lock "refresh_cache-543875b5-195a-476d-a0b4-3211ceefa27f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.667416] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Acquired lock "refresh_cache-543875b5-195a-476d-a0b4-3211ceefa27f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.667416] env[63538]: DEBUG nova.network.neutron [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 618.927803] env[63538]: DEBUG nova.compute.manager [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Start building block device mappings for 
instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 618.957726] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ffc8c3-eb47-47ef-9834-d8fb6b52f764 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.970507] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d1243b-17a5-43c6-a805-4d414459a6f6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.013437] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15cb4855-28eb-40c0-8447-fe349449f8e9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.024715] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528af165-f1fc-e5a1-3f4a-ba86d78c1a90, 'name': SearchDatastore_Task, 'duration_secs': 0.020776} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.025583] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37f95c4c-e40b-4299-891a-be297cec5b8e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.029146] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f614b0-850c-4e1a-8567-a5cec00bfad0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.039110] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Waiting for the task: (returnval){ [ 619.039110] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d5091c-d37f-e99a-298a-1baf344d80cd" [ 619.039110] env[63538]: _type = "Task" [ 619.039110] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.052040] env[63538]: DEBUG nova.compute.provider_tree [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.063864] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d5091c-d37f-e99a-298a-1baf344d80cd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.244258] env[63538]: DEBUG nova.network.neutron [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 619.384158] env[63538]: DEBUG nova.compute.manager [req-d8a4e6ac-02fa-4538-9321-dc1e75bb0865 req-9715d270-6ad8-4f38-b1ac-4034f19c52df service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Received event network-changed-4fd02a9a-ba01-4841-a942-ca1b96503c0e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 619.386180] env[63538]: DEBUG nova.compute.manager [req-d8a4e6ac-02fa-4538-9321-dc1e75bb0865 req-9715d270-6ad8-4f38-b1ac-4034f19c52df service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Refreshing instance network info cache due to event network-changed-4fd02a9a-ba01-4841-a942-ca1b96503c0e. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 619.386180] env[63538]: DEBUG oslo_concurrency.lockutils [req-d8a4e6ac-02fa-4538-9321-dc1e75bb0865 req-9715d270-6ad8-4f38-b1ac-4034f19c52df service nova] Acquiring lock "refresh_cache-102c0463-fb64-4dda-914c-b98c8e9991ad" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.386180] env[63538]: DEBUG oslo_concurrency.lockutils [req-d8a4e6ac-02fa-4538-9321-dc1e75bb0865 req-9715d270-6ad8-4f38-b1ac-4034f19c52df service nova] Acquired lock "refresh_cache-102c0463-fb64-4dda-914c-b98c8e9991ad" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.386180] env[63538]: DEBUG nova.network.neutron [req-d8a4e6ac-02fa-4538-9321-dc1e75bb0865 req-9715d270-6ad8-4f38-b1ac-4034f19c52df service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Refreshing network info cache for port 4fd02a9a-ba01-4841-a942-ca1b96503c0e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 619.477363] env[63538]: DEBUG nova.network.neutron [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Successfully created port: 38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 619.562287] env[63538]: DEBUG nova.scheduler.client.report [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 619.574137] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 
tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d5091c-d37f-e99a-298a-1baf344d80cd, 'name': SearchDatastore_Task, 'duration_secs': 0.02563} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.574441] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.574664] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 102c0463-fb64-4dda-914c-b98c8e9991ad/102c0463-fb64-4dda-914c-b98c8e9991ad.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 619.575101] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe7f588c-cc7c-4bd2-b254-6258376e625e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.587640] env[63538]: DEBUG nova.network.neutron [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Successfully updated port: f752fb93-15ab-4803-9e58-012b22d5f121 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 619.590044] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Waiting for the task: (returnval){ [ 619.590044] env[63538]: value = "task-5100470" [ 619.590044] env[63538]: _type = "Task" [ 619.590044] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.606892] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100470, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.723842] env[63538]: DEBUG nova.network.neutron [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Updating instance_info_cache with network_info: [{"id": "16b1a07f-5af3-4a11-967b-acc2df708c1d", "address": "fa:16:3e:e4:04:09", "network": {"id": "93c6d210-dcf5-4f42-a7de-20b9e4f21717", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-124766487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81348a2052934087a4b147aad4e7eb39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16b1a07f-5a", "ovs_interfaceid": "16b1a07f-5af3-4a11-967b-acc2df708c1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.938915] env[63538]: DEBUG nova.compute.manager [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 619.990304] env[63538]: DEBUG nova.virt.hardware [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 619.990838] env[63538]: DEBUG nova.virt.hardware [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 619.991136] env[63538]: DEBUG nova.virt.hardware [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 619.991429] env[63538]: DEBUG nova.virt.hardware [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 619.991660] env[63538]: DEBUG nova.virt.hardware [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 619.991901] env[63538]: DEBUG nova.virt.hardware [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 619.992260] env[63538]: DEBUG nova.virt.hardware [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 619.992521] env[63538]: DEBUG nova.virt.hardware [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 619.994335] env[63538]: DEBUG nova.virt.hardware [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 619.994335] env[63538]: DEBUG nova.virt.hardware [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 619.994335] env[63538]: DEBUG nova.virt.hardware [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 619.994967] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7c617e-1bd0-4ee0-9ee2-efb8eccfcb20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.013838] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22d3836-9c90-4307-9480-b8227de09812 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.068833] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.156s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.071900] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.495s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.074302] env[63538]: INFO nova.compute.claims [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 620.105959] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "refresh_cache-ede967c0-ec3a-4f26-8290-0ee36890cd75" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.106286] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "refresh_cache-ede967c0-ec3a-4f26-8290-0ee36890cd75" {{(pid=63538) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.106405] env[63538]: DEBUG nova.network.neutron [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 620.111685] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100470, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.113043] env[63538]: INFO nova.scheduler.client.report [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Deleted allocations for instance e1710498-0616-4862-afc0-6e452dc19882 [ 620.227888] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Releasing lock "refresh_cache-543875b5-195a-476d-a0b4-3211ceefa27f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.232058] env[63538]: DEBUG nova.compute.manager [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Instance network_info: |[{"id": "16b1a07f-5af3-4a11-967b-acc2df708c1d", "address": "fa:16:3e:e4:04:09", "network": {"id": "93c6d210-dcf5-4f42-a7de-20b9e4f21717", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-124766487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81348a2052934087a4b147aad4e7eb39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16b1a07f-5a", "ovs_interfaceid": "16b1a07f-5af3-4a11-967b-acc2df708c1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 620.232308] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:04:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb3425ea-72e7-41e3-92a7-820db9ec4661', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'16b1a07f-5af3-4a11-967b-acc2df708c1d', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 620.240719] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Creating folder: Project (81348a2052934087a4b147aad4e7eb39). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 620.241896] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0906fcfb-351a-49c0-b0b1-6a138c77e046 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.255710] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Created folder: Project (81348a2052934087a4b147aad4e7eb39) in parent group-v992234. [ 620.256249] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Creating folder: Instances. Parent ref: group-v992285. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 620.256400] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f29ff66f-37b5-4d6e-a519-9d4e0505bfda {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.278278] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Created folder: Instances in parent group-v992285. [ 620.278278] env[63538]: DEBUG oslo.service.loopingcall [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 620.279133] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 620.279443] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79eb825b-43e8-4a68-8eb7-f0c77b4aef9e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.313012] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 620.313012] env[63538]: value = "task-5100473" [ 620.313012] env[63538]: _type = "Task" [ 620.313012] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.321504] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100473, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.609250] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100470, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.673074} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.611974] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 102c0463-fb64-4dda-914c-b98c8e9991ad/102c0463-fb64-4dda-914c-b98c8e9991ad.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 620.612648] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 620.612983] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea67cab0-4fef-4ae7-b74d-da816ea1879c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.620601] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8afc216-4b2a-4d69-b417-510b25dc2b6c tempest-ServerDiagnosticsTest-769220963 tempest-ServerDiagnosticsTest-769220963-project-member] Lock "e1710498-0616-4862-afc0-6e452dc19882" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.388s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.630473] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Waiting for the task: (returnval){ [ 620.630473] env[63538]: value = "task-5100474" [ 620.630473] env[63538]: _type = "Task" [ 620.630473] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.647463] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100474, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.681378] env[63538]: DEBUG nova.network.neutron [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 620.721985] env[63538]: DEBUG oslo_concurrency.lockutils [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Acquiring lock "47500aaa-92fc-454c-badd-d6f8a2203083" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.722683] env[63538]: DEBUG oslo_concurrency.lockutils [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Lock "47500aaa-92fc-454c-badd-d6f8a2203083" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.765796] env[63538]: DEBUG nova.network.neutron [req-d8a4e6ac-02fa-4538-9321-dc1e75bb0865 req-9715d270-6ad8-4f38-b1ac-4034f19c52df service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Updated VIF entry in instance network info cache for port 4fd02a9a-ba01-4841-a942-ca1b96503c0e. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 620.766286] env[63538]: DEBUG nova.network.neutron [req-d8a4e6ac-02fa-4538-9321-dc1e75bb0865 req-9715d270-6ad8-4f38-b1ac-4034f19c52df service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Updating instance_info_cache with network_info: [{"id": "4fd02a9a-ba01-4841-a942-ca1b96503c0e", "address": "fa:16:3e:b7:48:f2", "network": {"id": "22f4bf41-624c-46c3-8d4b-9e72e652bacb", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-829996620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540ca40795e54cfb8e38e203bba99ba0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fd02a9a-ba", "ovs_interfaceid": "4fd02a9a-ba01-4841-a942-ca1b96503c0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.823186] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100473, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.037893] env[63538]: DEBUG nova.network.neutron [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Updating instance_info_cache with network_info: [{"id": "f752fb93-15ab-4803-9e58-012b22d5f121", "address": "fa:16:3e:0f:19:27", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf752fb93-15", "ovs_interfaceid": "f752fb93-15ab-4803-9e58-012b22d5f121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.151158] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100474, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078951} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.162236] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 621.162236] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7bbe1e-60c2-4c6d-a5c0-33282d2be68a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.185957] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 102c0463-fb64-4dda-914c-b98c8e9991ad/102c0463-fb64-4dda-914c-b98c8e9991ad.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 621.190096] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afa2b506-ebd6-4a7f-8de8-543afcc7d8d1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.213861] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Waiting for the task: (returnval){ [ 621.213861] env[63538]: value = "task-5100475" [ 621.213861] env[63538]: _type = "Task" [ 621.213861] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.228670] env[63538]: DEBUG nova.compute.manager [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 621.235494] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100475, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.274985] env[63538]: DEBUG oslo_concurrency.lockutils [req-d8a4e6ac-02fa-4538-9321-dc1e75bb0865 req-9715d270-6ad8-4f38-b1ac-4034f19c52df service nova] Releasing lock "refresh_cache-102c0463-fb64-4dda-914c-b98c8e9991ad" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.328471] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100473, 'name': CreateVM_Task, 'duration_secs': 0.526217} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.330387] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 621.330696] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.330794] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.332593] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 621.332593] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43d6d951-131c-4b75-b73a-616dec2be69a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.339527] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Waiting for the task: (returnval){ [ 621.339527] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d35c4f-0a52-1c75-33ab-4a35f5c6c02e" [ 621.339527] env[63538]: _type = "Task" [ 621.339527] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.351195] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d35c4f-0a52-1c75-33ab-4a35f5c6c02e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.541012] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "refresh_cache-ede967c0-ec3a-4f26-8290-0ee36890cd75" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.541335] env[63538]: DEBUG nova.compute.manager [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Instance network_info: |[{"id": "f752fb93-15ab-4803-9e58-012b22d5f121", "address": "fa:16:3e:0f:19:27", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf752fb93-15", "ovs_interfaceid": "f752fb93-15ab-4803-9e58-012b22d5f121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 621.541963] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:19:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f752fb93-15ab-4803-9e58-012b22d5f121', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 621.551056] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Creating folder: Project (ea05f3fb4676466bb2a286f5a2fefb8f). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 621.551773] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-722916a7-69ba-40e0-8921-34ccb14ae3d1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.568407] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Created folder: Project (ea05f3fb4676466bb2a286f5a2fefb8f) in parent group-v992234. 
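The Folder.CreateFolder and CreateVM_Task calls above follow oslo.vmware's invoke-then-poll pattern: invoke_api() issues the vSphere call, and for the *_Task methods the returned task moref is handed to wait_for_task(), which produces the repeated "Waiting for the task" / "progress is N%" poll lines. A minimal sketch of that pattern, assuming placeholder vCenter credentials and a config spec / resource pool built elsewhere (as Nova's vm_util does); it is not the driver's own code:

from oslo_vmware import api as vmware_api

# Placeholder host/credentials; task_poll_interval controls how often the
# "_poll_task ... progress is N%" lines above are emitted.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def create_project_folder(session, parent_folder, name):
    # Synchronous vSphere call: CreateFolder returns the new Folder moref
    # directly, so no task polling is needed.
    return session.invoke_api(session.vim, 'CreateFolder',
                              parent_folder, name=name)

def create_vm(session, folder, config_spec, res_pool):
    # Asynchronous call: CreateVM_Task returns a Task moref, and
    # wait_for_task() polls task.info until vCenter reports success
    # (or raises on error), matching the log's poll/complete sequence.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder,
                              config=config_spec, pool=res_pool)
    return session.wait_for_task(task)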
[ 621.568633] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Creating folder: Instances. Parent ref: group-v992288. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 621.570176] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6989e2a9-f2b7-4d70-809f-4f8838d589db {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.573316] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8da7612-7803-4491-b9b8-a549e3e67093 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.582600] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c00a990-e23e-4844-a43f-61db171e1aec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.588135] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Created folder: Instances in parent group-v992288. [ 621.589690] env[63538]: DEBUG oslo.service.loopingcall [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 621.589690] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 621.589927] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-168af0f6-c4be-4f6e-b28e-4660de971b78 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.635341] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91ddbbf-abd2-4d0f-b1db-9babcf3f8e8c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.639282] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 621.639282] env[63538]: value = "task-5100478" [ 621.639282] env[63538]: _type = "Task" [ 621.639282] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.651273] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d4a2c2-9226-4f9e-a33b-982a1f0a6c7e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.656030] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100478, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.673096] env[63538]: DEBUG nova.compute.provider_tree [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.744537] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100475, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.764624] env[63538]: DEBUG oslo_concurrency.lockutils [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.854231] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d35c4f-0a52-1c75-33ab-4a35f5c6c02e, 'name': SearchDatastore_Task, 'duration_secs': 0.066276} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.854622] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.854919] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 621.858775] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.858775] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.858775] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-631a9e77-8680-4384-a739-384a41718244 
tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 621.859490] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f44d426-63aa-4849-85e7-56cdacab8c67 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.876282] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 621.876652] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 621.877531] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01cd5426-22b5-4389-9f1a-e13287869e38 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.888937] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Waiting for the task: (returnval){ [ 621.888937] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526a657f-72cf-9898-8a8c-d540227fe84e" [ 621.888937] env[63538]: _type = "Task" [ 621.888937] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.914765] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "b5593b74-fe89-43f5-a8c6-e73159b4efac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.918041] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "b5593b74-fe89-43f5-a8c6-e73159b4efac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.918041] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526a657f-72cf-9898-8a8c-d540227fe84e, 'name': SearchDatastore_Task, 'duration_secs': 0.01369} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.918041] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8aeaee7-7835-4fd0-bc35-b56bf5653129 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.933266] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Waiting for the task: (returnval){ [ 621.933266] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e86add-1ff0-27f1-90d9-4bb9ac7d1c98" [ 621.933266] env[63538]: _type = "Task" [ 621.933266] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.950421] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e86add-1ff0-27f1-90d9-4bb9ac7d1c98, 'name': SearchDatastore_Task, 'duration_secs': 0.014625} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.950526] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.951523] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 543875b5-195a-476d-a0b4-3211ceefa27f/543875b5-195a-476d-a0b4-3211ceefa27f.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 621.951523] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b5ab5c3-e753-46ef-9bc4-dcc4fc254d8b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.962465] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Waiting for the task: (returnval){ [ 621.962465] env[63538]: value = "task-5100479" [ 621.962465] env[63538]: _type = "Task" [ 621.962465] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.976163] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100479, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.153145] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100478, 'name': CreateVM_Task, 'duration_secs': 0.388749} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.153550] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 622.154252] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.154252] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.154709] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 622.154924] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-987b1942-a8e6-48f4-b66c-9cfe3fb76a5e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.163160] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 622.163160] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52eda683-3df6-e616-93b8-53dcb0c62501" [ 622.163160] env[63538]: _type = "Task" [ 622.163160] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.173573] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52eda683-3df6-e616-93b8-53dcb0c62501, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.178659] env[63538]: DEBUG nova.scheduler.client.report [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 622.187642] env[63538]: DEBUG nova.compute.manager [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Received event network-vif-plugged-16b1a07f-5af3-4a11-967b-acc2df708c1d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 622.187840] env[63538]: DEBUG oslo_concurrency.lockutils [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] Acquiring lock "543875b5-195a-476d-a0b4-3211ceefa27f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.188193] env[63538]: DEBUG oslo_concurrency.lockutils [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] Lock "543875b5-195a-476d-a0b4-3211ceefa27f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.188240] env[63538]: DEBUG oslo_concurrency.lockutils [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] Lock "543875b5-195a-476d-a0b4-3211ceefa27f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.188384] env[63538]: DEBUG nova.compute.manager [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] No waiting events found dispatching network-vif-plugged-16b1a07f-5af3-4a11-967b-acc2df708c1d {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 622.188543] env[63538]: WARNING nova.compute.manager [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Received unexpected event network-vif-plugged-16b1a07f-5af3-4a11-967b-acc2df708c1d for instance with vm_state building and task_state spawning. 
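The lock traffic around this event handling ('Acquiring lock ... by ...', 'acquired ... :: waited', '"released" ... :: held', plus the plain Acquiring/Acquired/Releasing lines) comes from oslo.concurrency's named locks: the lockutils.synchronized decorator's inner wrapper (lockutils.py:402/407/421) and the lockutils.lock() context manager (lockutils.py:310/313/331). A minimal sketch of both forms, using lock names modelled on the ones in this log; the function bodies are illustrative only:

from oslo_concurrency import lockutils

# Decorator form: serializes callers on the named lock and logs the
# 'acquired ... :: waited' / '"released" ... :: held' pairs seen above.
@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # resource-tracker style work happens with the lock held
    return instance_uuid

# Context-manager form: produces the Acquiring/Acquired/Releasing
# 'lock "refresh_cache-..."' lines in this log.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network info cache here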
[ 622.188700] env[63538]: DEBUG nova.compute.manager [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Received event network-changed-16b1a07f-5af3-4a11-967b-acc2df708c1d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 622.188849] env[63538]: DEBUG nova.compute.manager [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Refreshing instance network info cache due to event network-changed-16b1a07f-5af3-4a11-967b-acc2df708c1d. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 622.189211] env[63538]: DEBUG oslo_concurrency.lockutils [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] Acquiring lock "refresh_cache-543875b5-195a-476d-a0b4-3211ceefa27f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.189211] env[63538]: DEBUG oslo_concurrency.lockutils [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] Acquired lock "refresh_cache-543875b5-195a-476d-a0b4-3211ceefa27f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.189327] env[63538]: DEBUG nova.network.neutron [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Refreshing network info cache for port 16b1a07f-5af3-4a11-967b-acc2df708c1d {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 622.225870] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100475, 'name': ReconfigVM_Task, 'duration_secs': 0.72524} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.226170] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 102c0463-fb64-4dda-914c-b98c8e9991ad/102c0463-fb64-4dda-914c-b98c8e9991ad.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 622.227203] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b42e1d3-9aac-486f-abd7-a0a7cb56627d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.234627] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Waiting for the task: (returnval){ [ 622.234627] env[63538]: value = "task-5100480" [ 622.234627] env[63538]: _type = "Task" [ 622.234627] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.245023] env[63538]: DEBUG nova.network.neutron [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Successfully updated port: 38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 622.251991] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100480, 'name': Rename_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.474703] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100479, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.680833] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52eda683-3df6-e616-93b8-53dcb0c62501, 'name': SearchDatastore_Task, 'duration_secs': 0.013645} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.680833] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.680833] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 622.680833] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.681116] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.681116] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 622.681457] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4394c4af-5fee-494b-9a8b-cf9a4b25b3cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.686771] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.615s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.688057] env[63538]: DEBUG nova.compute.manager [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 622.690021] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 18.460s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.696137] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 622.696389] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 622.699014] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28e9c7a6-1e33-45a2-9116-f39a02751d15 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.704718] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 622.704718] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527637c8-b570-29ef-07da-d72d7901b114" [ 622.704718] env[63538]: _type = "Task" [ 622.704718] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.720268] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527637c8-b570-29ef-07da-d72d7901b114, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.725448] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "ee9fe572-7a17-46db-8330-4b6f632c6b2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.726711] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "ee9fe572-7a17-46db-8330-4b6f632c6b2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.752721] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.752721] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquired lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.752721] env[63538]: DEBUG nova.network.neutron [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 622.752975] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100480, 'name': Rename_Task, 'duration_secs': 0.455897} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.753460] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 622.753827] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca743dd0-fb43-4c1e-9d93-f7f83b0067c3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.763798] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Waiting for the task: (returnval){ [ 622.763798] env[63538]: value = "task-5100481" [ 622.763798] env[63538]: _type = "Task" [ 622.763798] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.776917] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100481, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.800637] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "5421e135-9581-4f81-aa8a-2a604887a1df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.800986] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "5421e135-9581-4f81-aa8a-2a604887a1df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.845142] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "65fc18ff-8901-40d2-8a5b-640eb9768240" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.845364] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "65fc18ff-8901-40d2-8a5b-640eb9768240" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.980173] env[63538]: DEBUG 
oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100479, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.657085} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.980173] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 543875b5-195a-476d-a0b4-3211ceefa27f/543875b5-195a-476d-a0b4-3211ceefa27f.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 622.980173] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 622.980173] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1a90c000-721f-4118-a133-b16482d341f3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.990337] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Waiting for the task: (returnval){ [ 622.990337] env[63538]: value = "task-5100482" [ 622.990337] env[63538]: _type = "Task" [ 622.990337] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.007507] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100482, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.197807] env[63538]: INFO nova.compute.claims [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 623.204650] env[63538]: DEBUG nova.compute.utils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 623.210152] env[63538]: DEBUG nova.compute.manager [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 623.210619] env[63538]: DEBUG nova.network.neutron [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 623.236196] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527637c8-b570-29ef-07da-d72d7901b114, 'name': SearchDatastore_Task, 'duration_secs': 0.023278} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.240055] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59b18174-25e1-4319-a693-5e9c7506ea09 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.248234] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 623.248234] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522656bc-577f-6bf0-68dc-1c59df85426c" [ 623.248234] env[63538]: _type = "Task" [ 623.248234] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.260567] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522656bc-577f-6bf0-68dc-1c59df85426c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.278964] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100481, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.328601] env[63538]: DEBUG nova.network.neutron [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.357981] env[63538]: DEBUG nova.policy [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd00b1abc532047669787b96bd99d6923', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '36d0dd36b0834f53b21ced88df2b9097', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 623.386053] env[63538]: DEBUG nova.network.neutron [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Updated VIF entry in instance network info cache for port 16b1a07f-5af3-4a11-967b-acc2df708c1d. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 623.386053] env[63538]: DEBUG nova.network.neutron [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Updating instance_info_cache with network_info: [{"id": "16b1a07f-5af3-4a11-967b-acc2df708c1d", "address": "fa:16:3e:e4:04:09", "network": {"id": "93c6d210-dcf5-4f42-a7de-20b9e4f21717", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-124766487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81348a2052934087a4b147aad4e7eb39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16b1a07f-5a", "ovs_interfaceid": "16b1a07f-5af3-4a11-967b-acc2df708c1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.505891] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100482, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086872} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.508482] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 623.508482] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e96612-ebda-4377-bb2b-c4becff5ec56 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.536952] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 543875b5-195a-476d-a0b4-3211ceefa27f/543875b5-195a-476d-a0b4-3211ceefa27f.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 623.537703] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad86dd61-b486-4d72-b69d-007b1abc417e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.558962] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Waiting for the task: (returnval){ [ 623.558962] env[63538]: value = "task-5100483" [ 623.558962] env[63538]: _type = "Task" [ 623.558962] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.568442] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100483, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.714160] env[63538]: INFO nova.compute.resource_tracker [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating resource usage from migration 2e763caf-b452-499b-a6eb-169dbc8837b2 [ 623.719289] env[63538]: DEBUG nova.compute.manager [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 623.762511] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522656bc-577f-6bf0-68dc-1c59df85426c, 'name': SearchDatastore_Task, 'duration_secs': 0.027789} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.762511] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.762845] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] ede967c0-ec3a-4f26-8290-0ee36890cd75/ede967c0-ec3a-4f26-8290-0ee36890cd75.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 623.763107] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a773a09-6c2f-43fc-ac38-f616d31506f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.775017] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 623.775017] env[63538]: value = "task-5100484" [ 623.775017] env[63538]: _type = "Task" [ 623.775017] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.782465] env[63538]: DEBUG oslo_vmware.api [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100481, 'name': PowerOnVM_Task, 'duration_secs': 0.991623} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.783939] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 623.784285] env[63538]: INFO nova.compute.manager [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Took 11.68 seconds to spawn the instance on the hypervisor. 
[ 623.784588] env[63538]: DEBUG nova.compute.manager [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 623.788482] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee1cc28-1591-4f66-aaef-7056aac66edd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.798291] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100484, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.888475] env[63538]: DEBUG oslo_concurrency.lockutils [req-ac16102e-7310-46d8-9737-9f1d134ea3c6 req-5dae65a7-8aad-43a6-930a-840f8ae2d198 service nova] Releasing lock "refresh_cache-543875b5-195a-476d-a0b4-3211ceefa27f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.916788] env[63538]: DEBUG nova.network.neutron [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Updating instance_info_cache with network_info: [{"id": "38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "address": "fa:16:3e:5f:b1:e3", "network": {"id": "ad5bb52a-b208-49a5-986c-0bfcdffe7d94", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-907734579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5784127fe9d4eefaa1f55f0eacdb91d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38f5393e-f3", "ovs_interfaceid": "38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.010178] env[63538]: DEBUG nova.network.neutron [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Successfully created port: 7b43dc6d-d5bd-406d-8860-46abe9635ab5 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 624.071940] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100483, 'name': 
ReconfigVM_Task, 'duration_secs': 0.426497} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.072667] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 543875b5-195a-476d-a0b4-3211ceefa27f/543875b5-195a-476d-a0b4-3211ceefa27f.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 624.073135] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99e847fd-3a0c-465e-8896-b122e7408afb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.084177] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Waiting for the task: (returnval){ [ 624.084177] env[63538]: value = "task-5100485" [ 624.084177] env[63538]: _type = "Task" [ 624.084177] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.098857] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100485, 'name': Rename_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.160266] env[63538]: DEBUG nova.compute.manager [req-d5f3e0e7-a400-4b02-a849-32d6eab3d8dd req-040e5e4d-cdc2-4f70-9f3b-38c99276959b service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Received event network-changed-33b2bb84-c893-4ee8-90de-6696ef21d830 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 624.160266] env[63538]: DEBUG nova.compute.manager [req-d5f3e0e7-a400-4b02-a849-32d6eab3d8dd req-040e5e4d-cdc2-4f70-9f3b-38c99276959b service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Refreshing instance network info cache due to event network-changed-33b2bb84-c893-4ee8-90de-6696ef21d830. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 624.160620] env[63538]: DEBUG oslo_concurrency.lockutils [req-d5f3e0e7-a400-4b02-a849-32d6eab3d8dd req-040e5e4d-cdc2-4f70-9f3b-38c99276959b service nova] Acquiring lock "refresh_cache-bf54098e-91a8-403f-a6fe-b58a62daaadb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.160768] env[63538]: DEBUG oslo_concurrency.lockutils [req-d5f3e0e7-a400-4b02-a849-32d6eab3d8dd req-040e5e4d-cdc2-4f70-9f3b-38c99276959b service nova] Acquired lock "refresh_cache-bf54098e-91a8-403f-a6fe-b58a62daaadb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.160926] env[63538]: DEBUG nova.network.neutron [req-d5f3e0e7-a400-4b02-a849-32d6eab3d8dd req-040e5e4d-cdc2-4f70-9f3b-38c99276959b service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Refreshing network info cache for port 33b2bb84-c893-4ee8-90de-6696ef21d830 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 624.291306] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100484, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.323642] env[63538]: INFO nova.compute.manager [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Took 27.95 seconds to build instance. [ 624.359600] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ac4e78-0964-4632-a383-191c6df2321c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.369394] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d499d123-63ae-4ec1-8248-7c8f9999371c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.403822] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b5a29bb-9ff9-4249-906a-ccc6618e01cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.412420] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b62f4573-b278-4d1d-9f0f-a9dace9ca9f5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.420095] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Releasing lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.420360] env[63538]: DEBUG nova.compute.manager [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Instance network_info: |[{"id": 
"38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "address": "fa:16:3e:5f:b1:e3", "network": {"id": "ad5bb52a-b208-49a5-986c-0bfcdffe7d94", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-907734579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5784127fe9d4eefaa1f55f0eacdb91d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38f5393e-f3", "ovs_interfaceid": "38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 624.429595] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:b1:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00a15667-7ca5-4dc9-be92-164750d87988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 624.437329] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Creating folder: Project (f5784127fe9d4eefaa1f55f0eacdb91d). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 624.438295] env[63538]: DEBUG nova.compute.provider_tree [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.439721] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8421d09-8369-42bd-b702-7c6bca174ef5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.452520] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Created folder: Project (f5784127fe9d4eefaa1f55f0eacdb91d) in parent group-v992234. [ 624.452727] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Creating folder: Instances. Parent ref: group-v992291. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 624.452971] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dad6c75b-c23d-4997-8316-515fdc782888 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.466970] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Created folder: Instances in parent group-v992291. [ 624.467456] env[63538]: DEBUG oslo.service.loopingcall [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 624.467565] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 624.467809] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98cfd432-608e-4c39-a07b-063972e53ff4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.489863] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 624.489863] env[63538]: value = "task-5100488" [ 624.489863] env[63538]: _type = "Task" [ 624.489863] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.498649] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100488, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.595447] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100485, 'name': Rename_Task, 'duration_secs': 0.36222} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.596209] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 624.596209] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-006ad040-c119-4fec-81b3-abafe1287005 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.604885] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Waiting for the task: (returnval){ [ 624.604885] env[63538]: value = "task-5100489" [ 624.604885] env[63538]: _type = "Task" [ 624.604885] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.619380] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100489, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.738914] env[63538]: DEBUG nova.compute.manager [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 624.755375] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquiring lock "43729260-d138-4e62-9cc5-4db3ca39f5d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.755639] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Lock "43729260-d138-4e62-9cc5-4db3ca39f5d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.768888] env[63538]: DEBUG nova.virt.hardware [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 624.769362] env[63538]: DEBUG nova.virt.hardware [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 624.769362] env[63538]: DEBUG nova.virt.hardware [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 624.769505] env[63538]: DEBUG nova.virt.hardware [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 624.769624] env[63538]: DEBUG nova.virt.hardware [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 624.769788] env[63538]: DEBUG nova.virt.hardware [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 624.769993] env[63538]: DEBUG nova.virt.hardware [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 624.770398] env[63538]: DEBUG nova.virt.hardware [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 624.770596] env[63538]: DEBUG nova.virt.hardware [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 624.770798] env[63538]: DEBUG nova.virt.hardware [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 624.770950] env[63538]: DEBUG nova.virt.hardware [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 624.772533] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d900b4-3e03-4c02-aee4-e43e89122ff3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.784909] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239490c8-4185-4f7d-bfbc-d73687117327 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.793738] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b 
tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100484, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535939} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.794916] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] ede967c0-ec3a-4f26-8290-0ee36890cd75/ede967c0-ec3a-4f26-8290-0ee36890cd75.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 624.794916] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 624.798264] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-843a0951-3d24-47f7-b7ef-4b303b040583 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.815846] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "a7bb1869-5553-40d8-9c0b-366ccdef5fae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.816156] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "a7bb1869-5553-40d8-9c0b-366ccdef5fae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.818050] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 624.818050] env[63538]: value = "task-5100490" [ 624.818050] env[63538]: _type = "Task" [ 624.818050] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.823855] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4e3be727-032f-4161-bf1f-ab17fc6ae54e tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Lock "102c0463-fb64-4dda-914c-b98c8e9991ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.160s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.830763] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100490, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.946961] env[63538]: DEBUG nova.scheduler.client.report [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 625.002565] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100488, 'name': CreateVM_Task, 'duration_secs': 0.507159} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.002714] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 625.003482] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.003636] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.003984] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 625.004614] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41db1310-ebf4-4972-b764-fafd5ded6abb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.010860] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 625.010860] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b65da4-6c4e-2e10-c04a-2706752a8ee6" [ 625.010860] env[63538]: _type = "Task" [ 625.010860] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.023745] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b65da4-6c4e-2e10-c04a-2706752a8ee6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.064819] env[63538]: DEBUG nova.network.neutron [req-d5f3e0e7-a400-4b02-a849-32d6eab3d8dd req-040e5e4d-cdc2-4f70-9f3b-38c99276959b service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Updated VIF entry in instance network info cache for port 33b2bb84-c893-4ee8-90de-6696ef21d830. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 625.065319] env[63538]: DEBUG nova.network.neutron [req-d5f3e0e7-a400-4b02-a849-32d6eab3d8dd req-040e5e4d-cdc2-4f70-9f3b-38c99276959b service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Updating instance_info_cache with network_info: [{"id": "33b2bb84-c893-4ee8-90de-6696ef21d830", "address": "fa:16:3e:8e:14:ec", "network": {"id": "2af5d5f0-f120-4850-bd33-40951b8bbe31", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1240280342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22554cf5d2194573ba27c6236d2c3ad2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33b2bb84-c8", "ovs_interfaceid": "33b2bb84-c893-4ee8-90de-6696ef21d830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.117607] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100489, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.327308] env[63538]: DEBUG nova.compute.manager [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 625.334823] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100490, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071469} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.335142] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 625.336833] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5135c773-bc49-4249-983e-84a5354400eb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.379065] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] ede967c0-ec3a-4f26-8290-0ee36890cd75/ede967c0-ec3a-4f26-8290-0ee36890cd75.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 625.379304] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0aa9437-27bf-4847-b0f2-f70f9c3d569f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.405196] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 625.405196] env[63538]: value = "task-5100491" [ 625.405196] env[63538]: _type = "Task" [ 625.405196] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.415704] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100491, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.452820] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.763s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.452922] env[63538]: INFO nova.compute.manager [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Migrating [ 625.453111] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.453316] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "compute-rpcapi-router" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.454686] env[63538]: DEBUG oslo_concurrency.lockutils [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.824s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.454921] env[63538]: DEBUG nova.objects.instance [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Lazy-loading 'resources' on Instance uuid 0a7c34e0-1acc-4761-804a-eb9ee00fdd77 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 625.522058] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b65da4-6c4e-2e10-c04a-2706752a8ee6, 'name': SearchDatastore_Task, 'duration_secs': 0.020506} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.522531] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.522824] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 625.523110] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.523326] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.523566] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 625.524702] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7eea531b-bad3-4e71-9cb5-7c44a375414f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.538390] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 625.538634] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 625.539755] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-100f1aa1-5c00-4111-9b1e-9be896ea6b2e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.547789] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 625.547789] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bda36a-7d48-2328-1842-adb861f8e787" [ 625.547789] env[63538]: _type = "Task" [ 625.547789] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.559938] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bda36a-7d48-2328-1842-adb861f8e787, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.568486] env[63538]: DEBUG oslo_concurrency.lockutils [req-d5f3e0e7-a400-4b02-a849-32d6eab3d8dd req-040e5e4d-cdc2-4f70-9f3b-38c99276959b service nova] Releasing lock "refresh_cache-bf54098e-91a8-403f-a6fe-b58a62daaadb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.619744] env[63538]: DEBUG oslo_vmware.api [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100489, 'name': PowerOnVM_Task, 'duration_secs': 0.948465} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.620208] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 625.620349] env[63538]: INFO nova.compute.manager [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Took 10.94 seconds to spawn the instance on the hypervisor. 
[ 625.620566] env[63538]: DEBUG nova.compute.manager [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 625.622665] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3725d1-c3f8-42a8-8594-2b2e11381b5d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.864928] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.916888] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100491, 'name': ReconfigVM_Task, 'duration_secs': 0.315427} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.917619] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Reconfigured VM instance instance-00000012 to attach disk [datastore1] ede967c0-ec3a-4f26-8290-0ee36890cd75/ede967c0-ec3a-4f26-8290-0ee36890cd75.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 625.918289] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cff445c8-0ffc-4530-a404-689c9f83ca2e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.927099] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 625.927099] env[63538]: value = "task-5100492" [ 625.927099] env[63538]: _type = "Task" [ 625.927099] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.941099] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100492, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.961532] env[63538]: INFO nova.compute.rpcapi [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 625.962447] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "compute-rpcapi-router" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.060158] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bda36a-7d48-2328-1842-adb861f8e787, 'name': SearchDatastore_Task, 'duration_secs': 0.022187} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.067538] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a6f4a19-202e-4852-9fca-7fd193524e7d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.074379] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 626.074379] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5217dc75-a533-e524-f998-2bf05830a862" [ 626.074379] env[63538]: _type = "Task" [ 626.074379] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.083567] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5217dc75-a533-e524-f998-2bf05830a862, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.153737] env[63538]: INFO nova.compute.manager [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Took 29.60 seconds to build instance. 
[ 626.408956] env[63538]: DEBUG nova.network.neutron [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Successfully updated port: 7b43dc6d-d5bd-406d-8860-46abe9635ab5 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 626.422866] env[63538]: DEBUG nova.compute.manager [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Received event network-vif-plugged-f752fb93-15ab-4803-9e58-012b22d5f121 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 626.423546] env[63538]: DEBUG oslo_concurrency.lockutils [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] Acquiring lock "ede967c0-ec3a-4f26-8290-0ee36890cd75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.425693] env[63538]: DEBUG oslo_concurrency.lockutils [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] Lock "ede967c0-ec3a-4f26-8290-0ee36890cd75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.425693] env[63538]: DEBUG oslo_concurrency.lockutils [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] Lock "ede967c0-ec3a-4f26-8290-0ee36890cd75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.425693] env[63538]: DEBUG nova.compute.manager [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] No waiting events found dispatching network-vif-plugged-f752fb93-15ab-4803-9e58-012b22d5f121 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 626.425693] env[63538]: WARNING nova.compute.manager [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Received unexpected event network-vif-plugged-f752fb93-15ab-4803-9e58-012b22d5f121 for instance with vm_state building and task_state spawning. [ 626.425693] env[63538]: DEBUG nova.compute.manager [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Received event network-changed-f752fb93-15ab-4803-9e58-012b22d5f121 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 626.425882] env[63538]: DEBUG nova.compute.manager [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Refreshing instance network info cache due to event network-changed-f752fb93-15ab-4803-9e58-012b22d5f121. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 626.425882] env[63538]: DEBUG oslo_concurrency.lockutils [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] Acquiring lock "refresh_cache-ede967c0-ec3a-4f26-8290-0ee36890cd75" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.425882] env[63538]: DEBUG oslo_concurrency.lockutils [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] Acquired lock "refresh_cache-ede967c0-ec3a-4f26-8290-0ee36890cd75" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.425882] env[63538]: DEBUG nova.network.neutron [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Refreshing network info cache for port f752fb93-15ab-4803-9e58-012b22d5f121 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 626.450067] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100492, 'name': Rename_Task, 'duration_secs': 0.149461} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.451118] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 626.451771] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7600e2e4-40a3-4d39-b1a1-1c5119933c74 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.462832] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 626.462832] env[63538]: value = "task-5100493" [ 626.462832] env[63538]: _type = "Task" [ 626.462832] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.474871] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100493, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.485519] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.485519] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.485519] env[63538]: DEBUG nova.network.neutron [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 626.591463] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5217dc75-a533-e524-f998-2bf05830a862, 'name': SearchDatastore_Task, 'duration_secs': 0.030741} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.591463] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.592398] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] dbf48807-08a7-46d1-8454-42437a9f87c0/dbf48807-08a7-46d1-8454-42437a9f87c0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 626.594981] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8661f958-b448-42f9-93ae-62798386695b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.602800] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 626.602800] env[63538]: value = "task-5100494" [ 626.602800] env[63538]: _type = "Task" [ 626.602800] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.619666] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100494, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.653120] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f77422b-b341-4c20-b0fa-4069a36480b6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.657765] env[63538]: DEBUG oslo_concurrency.lockutils [None req-631a9e77-8680-4384-a739-384a41718244 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Lock "543875b5-195a-476d-a0b4-3211ceefa27f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.750s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.663663] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0477f30-af94-4e08-a299-220974a078b2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.704526] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6dec99f-b264-4c65-9a0c-99c8c3afc848 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.715174] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45fa3278-66e7-4998-817f-c9c3653baad3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.730960] env[63538]: DEBUG nova.compute.provider_tree [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.914282] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Acquiring lock "refresh_cache-7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.914282] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Acquired lock "refresh_cache-7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.914282] env[63538]: DEBUG nova.network.neutron [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Building network info cache for instance {{(pid=63538) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 626.981221] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100493, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.118654] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100494, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.137192] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Acquiring lock "4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.137414] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Lock "4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.159367] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 627.236024] env[63538]: DEBUG nova.scheduler.client.report [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 627.479028] env[63538]: DEBUG oslo_vmware.api [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100493, 'name': PowerOnVM_Task, 'duration_secs': 0.547718} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.479474] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 627.479741] env[63538]: INFO nova.compute.manager [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Took 10.11 seconds to spawn the instance on the hypervisor. [ 627.479865] env[63538]: DEBUG nova.compute.manager [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 627.480752] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d534b850-d9f3-4f76-a8e3-643236ca1099 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.507369] env[63538]: DEBUG nova.network.neutron [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 627.626028] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100494, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630029} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.626028] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] dbf48807-08a7-46d1-8454-42437a9f87c0/dbf48807-08a7-46d1-8454-42437a9f87c0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 627.626028] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 627.626028] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-030fc2b7-4e26-476e-8dab-5c86dd0c4e49 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.635154] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 627.635154] env[63538]: value = "task-5100495" [ 627.635154] env[63538]: _type = "Task" [ 627.635154] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.646741] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100495, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.691930] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.703036] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "99de5226-a27c-47c5-90fa-5f0c7204df1c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.703036] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "99de5226-a27c-47c5-90fa-5f0c7204df1c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.744431] env[63538]: DEBUG oslo_concurrency.lockutils [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.290s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.747049] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.984s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.747935] env[63538]: DEBUG nova.objects.instance [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Lazy-loading 'resources' on Instance uuid d99b7b8e-633f-4fba-bce6-9b8e9e9892d1 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 627.770182] env[63538]: INFO nova.scheduler.client.report [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Deleted allocations for instance 0a7c34e0-1acc-4761-804a-eb9ee00fdd77 [ 627.794871] env[63538]: DEBUG nova.network.neutron [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Updated VIF entry in instance network info cache for port f752fb93-15ab-4803-9e58-012b22d5f121. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 627.794871] env[63538]: DEBUG nova.network.neutron [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Updating instance_info_cache with network_info: [{"id": "f752fb93-15ab-4803-9e58-012b22d5f121", "address": "fa:16:3e:0f:19:27", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf752fb93-15", "ovs_interfaceid": "f752fb93-15ab-4803-9e58-012b22d5f121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.802231] env[63538]: DEBUG nova.network.neutron [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance_info_cache with network_info: [{"id": "47d19b83-6292-46e2-835f-1198ef52374c", "address": "fa:16:3e:af:6f:01", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d19b83-62", "ovs_interfaceid": "47d19b83-6292-46e2-835f-1198ef52374c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.899892] env[63538]: DEBUG nova.network.neutron [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Updating instance_info_cache with network_info: [{"id": "7b43dc6d-d5bd-406d-8860-46abe9635ab5", "address": "fa:16:3e:58:4b:58", "network": {"id": 
"374f6662-54be-4863-92bf-f55c9e648f16", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1981842704-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36d0dd36b0834f53b21ced88df2b9097", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b43dc6d-d5", "ovs_interfaceid": "7b43dc6d-d5bd-406d-8860-46abe9635ab5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.993985] env[63538]: DEBUG nova.compute.manager [req-ee9965e9-cd04-47aa-980b-a929e31547b5 req-aa88dc76-c235-4051-9b21-4deef411320d service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Received event network-changed-514b9391-5894-4419-800a-e06658f8a44b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 627.994603] env[63538]: DEBUG nova.compute.manager [req-ee9965e9-cd04-47aa-980b-a929e31547b5 req-aa88dc76-c235-4051-9b21-4deef411320d service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Refreshing instance network info cache due to event network-changed-514b9391-5894-4419-800a-e06658f8a44b. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 627.994603] env[63538]: DEBUG oslo_concurrency.lockutils [req-ee9965e9-cd04-47aa-980b-a929e31547b5 req-aa88dc76-c235-4051-9b21-4deef411320d service nova] Acquiring lock "refresh_cache-1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.994603] env[63538]: DEBUG oslo_concurrency.lockutils [req-ee9965e9-cd04-47aa-980b-a929e31547b5 req-aa88dc76-c235-4051-9b21-4deef411320d service nova] Acquired lock "refresh_cache-1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.996229] env[63538]: DEBUG nova.network.neutron [req-ee9965e9-cd04-47aa-980b-a929e31547b5 req-aa88dc76-c235-4051-9b21-4deef411320d service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Refreshing network info cache for port 514b9391-5894-4419-800a-e06658f8a44b {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 628.013314] env[63538]: INFO nova.compute.manager [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Took 30.88 seconds to build instance. [ 628.149255] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068131} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.149255] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 628.150015] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad195fd8-c5d9-426b-bebd-1505a0c72a0c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.176283] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] dbf48807-08a7-46d1-8454-42437a9f87c0/dbf48807-08a7-46d1-8454-42437a9f87c0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 628.176957] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65f655c7-fd72-4cd8-a68b-69b992f4a11f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.203021] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 628.203021] env[63538]: value = "task-5100496" [ 628.203021] env[63538]: _type = "Task" [ 628.203021] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.212231] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100496, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.288792] env[63538]: DEBUG oslo_concurrency.lockutils [None req-452cd08e-f1e3-4df1-889e-40eaaf4feef8 tempest-ServerExternalEventsTest-22789654 tempest-ServerExternalEventsTest-22789654-project-member] Lock "0a7c34e0-1acc-4761-804a-eb9ee00fdd77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.263s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.298642] env[63538]: DEBUG oslo_concurrency.lockutils [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] Releasing lock "refresh_cache-ede967c0-ec3a-4f26-8290-0ee36890cd75" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.302068] env[63538]: DEBUG nova.compute.manager [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Received event network-vif-plugged-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 628.302068] env[63538]: DEBUG oslo_concurrency.lockutils [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] Acquiring lock "dbf48807-08a7-46d1-8454-42437a9f87c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.302068] env[63538]: DEBUG oslo_concurrency.lockutils [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] Lock "dbf48807-08a7-46d1-8454-42437a9f87c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.302068] env[63538]: DEBUG oslo_concurrency.lockutils [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] Lock "dbf48807-08a7-46d1-8454-42437a9f87c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.302068] env[63538]: DEBUG nova.compute.manager [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] No waiting events found dispatching network-vif-plugged-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 628.302381] env[63538]: WARNING nova.compute.manager [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Received unexpected event network-vif-plugged-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e for instance with vm_state building and task_state spawning. 
[ 628.302381] env[63538]: DEBUG nova.compute.manager [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Received event network-changed-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 628.302381] env[63538]: DEBUG nova.compute.manager [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Refreshing instance network info cache due to event network-changed-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 628.302381] env[63538]: DEBUG oslo_concurrency.lockutils [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] Acquiring lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.302381] env[63538]: DEBUG oslo_concurrency.lockutils [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] Acquired lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.302559] env[63538]: DEBUG nova.network.neutron [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Refreshing network info cache for port 38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 628.311065] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.402416] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Releasing lock "refresh_cache-7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.402740] env[63538]: DEBUG nova.compute.manager [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Instance network_info: |[{"id": "7b43dc6d-d5bd-406d-8860-46abe9635ab5", "address": "fa:16:3e:58:4b:58", "network": {"id": "374f6662-54be-4863-92bf-f55c9e648f16", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1981842704-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36d0dd36b0834f53b21ced88df2b9097", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b43dc6d-d5", "ovs_interfaceid": "7b43dc6d-d5bd-406d-8860-46abe9635ab5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 628.403694] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:4b:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3261e15f-7e45-4516-acfd-341bab16e3cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7b43dc6d-d5bd-406d-8860-46abe9635ab5', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 628.414634] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Creating folder: Project (36d0dd36b0834f53b21ced88df2b9097). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 628.414634] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c9bab3f-2c78-4d3a-87f9-6d6848f7f80c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.429419] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Created folder: Project (36d0dd36b0834f53b21ced88df2b9097) in parent group-v992234. [ 628.430249] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Creating folder: Instances. Parent ref: group-v992294. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 628.430249] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46fec6b8-788e-4cca-88c7-211c77fcbf77 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.443473] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Created folder: Instances in parent group-v992294. [ 628.445799] env[63538]: DEBUG oslo.service.loopingcall [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 628.445799] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 628.445799] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c96c9a6-9e81-47bf-ada8-77ec0fc39d67 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.471967] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 628.471967] env[63538]: value = "task-5100499" [ 628.471967] env[63538]: _type = "Task" [ 628.471967] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.481594] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100499, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.516581] env[63538]: DEBUG oslo_concurrency.lockutils [None req-25c88e09-236d-4cfc-88d1-7914b99a9b1b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "ede967c0-ec3a-4f26-8290-0ee36890cd75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.017s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.719744] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100496, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.885187] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8aa80c-769e-4a95-8061-924d334d7a92 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.894275] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77202295-a495-470c-9037-405693140d63 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.947104] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751987c6-6045-4739-ac3e-9e1367609936 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.962175] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a0ffc2-8d46-4221-9407-ed729b52c907 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.979582] env[63538]: DEBUG nova.compute.provider_tree [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.991121] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100499, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.019582] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 629.145738] env[63538]: DEBUG nova.network.neutron [req-ee9965e9-cd04-47aa-980b-a929e31547b5 req-aa88dc76-c235-4051-9b21-4deef411320d service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Updated VIF entry in instance network info cache for port 514b9391-5894-4419-800a-e06658f8a44b. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 629.146112] env[63538]: DEBUG nova.network.neutron [req-ee9965e9-cd04-47aa-980b-a929e31547b5 req-aa88dc76-c235-4051-9b21-4deef411320d service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Updating instance_info_cache with network_info: [{"id": "514b9391-5894-4419-800a-e06658f8a44b", "address": "fa:16:3e:c5:61:96", "network": {"id": "c0e827ce-4abc-4178-9811-36625c1d6ebc", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1894083828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6480e3bc216427d939223b9e3b6a21b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69351262-8d39-441a-85ba-3a78df436d17", "external-id": "nsx-vlan-transportzone-205", "segmentation_id": 205, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap514b9391-58", "ovs_interfaceid": "514b9391-5894-4419-800a-e06658f8a44b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.215836] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100496, 'name': ReconfigVM_Task, 'duration_secs': 0.754886} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.218428] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Reconfigured VM instance instance-00000013 to attach disk [datastore1] dbf48807-08a7-46d1-8454-42437a9f87c0/dbf48807-08a7-46d1-8454-42437a9f87c0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 629.218816] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f8fb6e4-a651-4a7d-8702-6a8cc1960c0c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.227795] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 629.227795] env[63538]: value = "task-5100500" [ 629.227795] env[63538]: _type = "Task" [ 629.227795] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.242639] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100500, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.483756] env[63538]: DEBUG nova.scheduler.client.report [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 629.497645] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100499, 'name': CreateVM_Task, 'duration_secs': 0.543882} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.498229] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 629.499648] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.499648] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.499648] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 629.499915] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9de73e2b-086f-4c6c-bbc4-e30b2dbc94ac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.512451] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Waiting for the task: (returnval){ [ 629.512451] env[63538]: value = 
"session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529e99fa-5dad-93ef-a8ea-52028317c6ef" [ 629.512451] env[63538]: _type = "Task" [ 629.512451] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.532396] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529e99fa-5dad-93ef-a8ea-52028317c6ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.548500] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.573505] env[63538]: DEBUG nova.network.neutron [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Updated VIF entry in instance network info cache for port 38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 629.573892] env[63538]: DEBUG nova.network.neutron [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Updating instance_info_cache with network_info: [{"id": "38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "address": "fa:16:3e:5f:b1:e3", "network": {"id": "ad5bb52a-b208-49a5-986c-0bfcdffe7d94", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-907734579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5784127fe9d4eefaa1f55f0eacdb91d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38f5393e-f3", "ovs_interfaceid": "38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.650623] env[63538]: DEBUG oslo_concurrency.lockutils [req-ee9965e9-cd04-47aa-980b-a929e31547b5 req-aa88dc76-c235-4051-9b21-4deef411320d service nova] Releasing lock "refresh_cache-1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.741894] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 
tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100500, 'name': Rename_Task, 'duration_secs': 0.359977} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.745130] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 629.745130] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96f960ed-682d-4b39-97f7-59ea82a481ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.752277] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 629.752277] env[63538]: value = "task-5100501" [ 629.752277] env[63538]: _type = "Task" [ 629.752277] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.762833] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100501, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.829979] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac851a9e-43b5-4b08-ace6-f3751f03dcc3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.855622] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance '2e1b0bc7-3909-48e2-b9be-26822a57ee67' progress to 0 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 629.993149] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.245s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.994969] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.897s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.996172] env[63538]: INFO nova.compute.claims [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Claim successful on 
node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 630.033461] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529e99fa-5dad-93ef-a8ea-52028317c6ef, 'name': SearchDatastore_Task, 'duration_secs': 0.015409} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.033461] env[63538]: INFO nova.scheduler.client.report [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Deleted allocations for instance d99b7b8e-633f-4fba-bce6-9b8e9e9892d1 [ 630.039183] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.039183] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 630.039183] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.039183] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.039529] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 630.039529] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9cf627b9-f858-42fe-925c-cb5ccee181b6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.064886] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
630.065106] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 630.065896] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50998636-b8e9-4cd3-8d0e-9dbd81cc4f8c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.075373] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Waiting for the task: (returnval){ [ 630.075373] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520443e6-0835-e8db-34e3-0f5d8a4337a7" [ 630.075373] env[63538]: _type = "Task" [ 630.075373] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.081512] env[63538]: DEBUG oslo_concurrency.lockutils [req-a98a4c4c-d470-4d01-b3af-3c647b8510e3 req-c7d05be1-0144-4fee-a72f-fee4da78469b service nova] Releasing lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.087466] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520443e6-0835-e8db-34e3-0f5d8a4337a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.246200] env[63538]: DEBUG nova.compute.manager [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Received event network-vif-plugged-7b43dc6d-d5bd-406d-8860-46abe9635ab5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 630.246409] env[63538]: DEBUG oslo_concurrency.lockutils [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] Acquiring lock "7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.246709] env[63538]: DEBUG oslo_concurrency.lockutils [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] Lock "7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.246856] env[63538]: DEBUG oslo_concurrency.lockutils [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] Lock "7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.253571] env[63538]: DEBUG nova.compute.manager [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] No waiting events found dispatching network-vif-plugged-7b43dc6d-d5bd-406d-8860-46abe9635ab5 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 630.253571] env[63538]: WARNING nova.compute.manager [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Received unexpected event network-vif-plugged-7b43dc6d-d5bd-406d-8860-46abe9635ab5 for instance with vm_state building and task_state spawning. [ 630.253571] env[63538]: DEBUG nova.compute.manager [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Received event network-changed-7b43dc6d-d5bd-406d-8860-46abe9635ab5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 630.253571] env[63538]: DEBUG nova.compute.manager [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Refreshing instance network info cache due to event network-changed-7b43dc6d-d5bd-406d-8860-46abe9635ab5. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 630.253571] env[63538]: DEBUG oslo_concurrency.lockutils [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] Acquiring lock "refresh_cache-7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.253892] env[63538]: DEBUG oslo_concurrency.lockutils [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] Acquired lock "refresh_cache-7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.253892] env[63538]: DEBUG nova.network.neutron [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Refreshing network info cache for port 7b43dc6d-d5bd-406d-8860-46abe9635ab5 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 630.273763] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100501, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.369290] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 630.369290] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-352c34c6-ab25-486a-b215-e319ba48b394 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.379390] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 630.379390] env[63538]: value = "task-5100502" [ 630.379390] env[63538]: _type = "Task" [ 630.379390] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.398717] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100502, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.558062] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e900e921-0a1f-41f5-9543-3aff1273a655 tempest-ServerDiagnosticsV248Test-1422138911 tempest-ServerDiagnosticsV248Test-1422138911-project-member] Lock "d99b7b8e-633f-4fba-bce6-9b8e9e9892d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.626s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.590990] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520443e6-0835-e8db-34e3-0f5d8a4337a7, 'name': SearchDatastore_Task, 'duration_secs': 0.025778} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.591869] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef7c8904-fc08-4421-948a-c7adbf33d913 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.598737] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Waiting for the task: (returnval){ [ 630.598737] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5299b70e-897c-b619-5b4c-8c2d09b4fa18" [ 630.598737] env[63538]: _type = "Task" [ 630.598737] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.612439] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5299b70e-897c-b619-5b4c-8c2d09b4fa18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.775270] env[63538]: DEBUG oslo_vmware.api [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100501, 'name': PowerOnVM_Task, 'duration_secs': 0.662046} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.775270] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 630.775270] env[63538]: INFO nova.compute.manager [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Took 10.84 seconds to spawn the instance on the hypervisor. 
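The entries above drive several vCenter tasks (CreateVM_Task, Rename_Task, SearchDatastore_Task, PowerOnVM_Task) to completion through the same shape of polling: the API returns a task handle, progress is polled ("progress is 0%" ... "progress is 89%"), and the final poll reports "completed successfully" together with a duration_secs value. The sketch below is only a rough illustration of that polling shape; the function and parameter names are hypothetical and this is not the oslo.vmware code that produced these lines (in the service the polling is done by oslo_vmware.api's wait_for_task/_poll_task on top of an oslo.service looping call, per the file/line references in the log).

```python
# Hypothetical sketch of the task-polling pattern the log reports:
# poll a vCenter-style task until success or error, returning the
# elapsed time the way the log prints 'duration_secs'.
import time

POLL_INTERVAL = 0.5  # seconds between polls (assumed; the real interval is configurable)


def wait_for_task(get_task_info, task_id, timeout=300.0):
    """Poll a task until it finishes.

    get_task_info is a caller-supplied callable returning a dict such as
    {'state': 'running', 'progress': 89} -- a stand-in for the TaskInfo
    object a real vSphere session would retrieve.
    """
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            return time.monotonic() - start              # reported as duration_secs
        if info['state'] == 'error':
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"task {task_id} did not complete within {timeout}s")
        # intermediate polls correspond to the "progress is N%" lines
        time.sleep(POLL_INTERVAL)
```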
[ 630.775488] env[63538]: DEBUG nova.compute.manager [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 630.776318] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ad9ee4-ee57-469f-9447-1cf3a493ad3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.827241] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0b5f79ad-08b9-4663-bb67-2f73c7234810 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "cebb39b7-1e2d-4460-8281-22a75355f4d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.827241] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0b5f79ad-08b9-4663-bb67-2f73c7234810 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "cebb39b7-1e2d-4460-8281-22a75355f4d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.895647] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100502, 'name': PowerOffVM_Task, 'duration_secs': 0.262407} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.896137] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 630.896411] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance '2e1b0bc7-3909-48e2-b9be-26822a57ee67' progress to 17 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 631.117759] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5299b70e-897c-b619-5b4c-8c2d09b4fa18, 'name': SearchDatastore_Task, 'duration_secs': 0.014046} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.120748] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.121034] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b/7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 631.121514] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fdce7080-d504-4a55-839a-7f3a3c5707c2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.130124] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Waiting for the task: (returnval){ [ 631.130124] env[63538]: value = "task-5100503" [ 631.130124] env[63538]: _type = "Task" [ 631.130124] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.145648] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100503, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.307022] env[63538]: INFO nova.compute.manager [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Took 32.00 seconds to build instance. 
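Interleaved with the task polling, the log records oslo.concurrency lock bookkeeping around shared state: per-instance "refresh_cache-<uuid>" locks guarding the network info cache, the image-cache datastore locks, the per-instance build lock, and the "compute_resources" lock whose waited/held times are printed on acquire and release (e.g. waited 22.897s, held 2.245s). Below is a minimal sketch of the two lockutils idioms involved, assuming the standard oslo.concurrency API; the helper names and bodies are illustrative, not Nova's actual code.

```python
# Illustrative only: lockutils.lock() as a context manager produces the
# "Acquiring" / "Acquired" / "Releasing lock ..." lines, while the
# lockutils.synchronized() decorator produces the
# "acquired by ... waited Ns" / "released ... held Ns" lines.
from oslo_concurrency import lockutils


def refresh_network_cache(instance_uuid, refresh_fn):
    # Serialize cache refreshes per instance, like the
    # "refresh_cache-<uuid>" locks above (hypothetical helper).
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        return refresh_fn(instance_uuid)


@lockutils.synchronized("compute_resources")
def claim_resources(tracker, instance):
    # Mirrors the "compute_resources" lock held around instance_claim /
    # update_usage in the resource-tracker entries (hypothetical body).
    return tracker.instance_claim(instance)
```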
[ 631.405584] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 631.405863] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 631.406011] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 631.406223] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 631.406449] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 631.406541] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 631.406744] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 631.406932] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 631.407122] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 631.407295] env[63538]: DEBUG nova.virt.hardware [None 
req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 631.407487] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 631.422225] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c4a6776-20c1-4652-9912-1ece42ff63e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.441336] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 631.441336] env[63538]: value = "task-5100504" [ 631.441336] env[63538]: _type = "Task" [ 631.441336] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.454797] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100504, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.601125] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df497385-40f4-442d-87da-30dae303bfe7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.612472] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175f9e27-2cea-4105-98fc-ea2b1b5dd578 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.664259] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c191450-8074-45ba-8bc3-16456222ee44 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.668668] env[63538]: DEBUG nova.compute.manager [req-cb2fed20-8e1b-40be-a678-50e624a68950 req-1f91f9c2-0f60-4032-ac2f-38e65d0a86cd service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Received event network-changed-4fd02a9a-ba01-4841-a942-ca1b96503c0e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 631.668843] env[63538]: DEBUG nova.compute.manager [req-cb2fed20-8e1b-40be-a678-50e624a68950 req-1f91f9c2-0f60-4032-ac2f-38e65d0a86cd service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Refreshing instance network info cache due to event network-changed-4fd02a9a-ba01-4841-a942-ca1b96503c0e. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 631.669627] env[63538]: DEBUG oslo_concurrency.lockutils [req-cb2fed20-8e1b-40be-a678-50e624a68950 req-1f91f9c2-0f60-4032-ac2f-38e65d0a86cd service nova] Acquiring lock "refresh_cache-102c0463-fb64-4dda-914c-b98c8e9991ad" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.669627] env[63538]: DEBUG oslo_concurrency.lockutils [req-cb2fed20-8e1b-40be-a678-50e624a68950 req-1f91f9c2-0f60-4032-ac2f-38e65d0a86cd service nova] Acquired lock "refresh_cache-102c0463-fb64-4dda-914c-b98c8e9991ad" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.669627] env[63538]: DEBUG nova.network.neutron [req-cb2fed20-8e1b-40be-a678-50e624a68950 req-1f91f9c2-0f60-4032-ac2f-38e65d0a86cd service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Refreshing network info cache for port 4fd02a9a-ba01-4841-a942-ca1b96503c0e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 631.682976] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100503, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.684387] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f916c88-f350-477f-91c8-347a8b80e1b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.704362] env[63538]: DEBUG nova.compute.provider_tree [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.812974] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f32e1336-4667-4bc8-bef3-1979822a7739 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "dbf48807-08a7-46d1-8454-42437a9f87c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.424s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.956530] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100504, 'name': ReconfigVM_Task, 'duration_secs': 0.345729} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.956530] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance '2e1b0bc7-3909-48e2-b9be-26822a57ee67' progress to 33 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 631.990092] env[63538]: DEBUG nova.network.neutron [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Updated VIF entry in instance network info cache for port 7b43dc6d-d5bd-406d-8860-46abe9635ab5. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 631.990182] env[63538]: DEBUG nova.network.neutron [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Updating instance_info_cache with network_info: [{"id": "7b43dc6d-d5bd-406d-8860-46abe9635ab5", "address": "fa:16:3e:58:4b:58", "network": {"id": "374f6662-54be-4863-92bf-f55c9e648f16", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1981842704-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36d0dd36b0834f53b21ced88df2b9097", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3261e15f-7e45-4516-acfd-341bab16e3cf", "external-id": "nsx-vlan-transportzone-783", "segmentation_id": 783, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b43dc6d-d5", "ovs_interfaceid": "7b43dc6d-d5bd-406d-8860-46abe9635ab5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.169342] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100503, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.868828} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.169342] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b/7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 632.169342] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 632.169342] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5558315c-2bb4-473b-bc73-7c3dc1b1c105 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.181447] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Waiting for the task: (returnval){ [ 632.181447] env[63538]: value = "task-5100505" [ 632.181447] env[63538]: _type = "Task" [ 632.181447] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.192794] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100505, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.210224] env[63538]: DEBUG nova.scheduler.client.report [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 632.315399] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 632.466478] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 632.467550] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 632.468179] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 632.468554] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 632.469222] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 632.470054] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 632.471231] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 632.471231] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 632.471231] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 632.471231] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 632.471570] env[63538]: DEBUG nova.virt.hardware [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 632.482861] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Reconfiguring VM instance instance-00000009 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 632.482861] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bee9db71-d75f-4671-8281-b3f25e814d4c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.496597] env[63538]: DEBUG oslo_concurrency.lockutils [req-fa635319-17bd-4dd9-8608-e2c5d0524af4 req-03083d3e-0c69-41a8-a5b7-7186982b8921 service nova] Releasing lock "refresh_cache-7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.507374] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 632.507374] env[63538]: value = "task-5100506" [ 632.507374] env[63538]: _type = "Task" [ 632.507374] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.519416] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100506, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.693998] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100505, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.1152} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.695136] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 632.696746] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff90659d-17df-4af6-8ead-a0852dd30f8e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.717392] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.719539] env[63538]: DEBUG nova.compute.manager [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 632.735019] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b/7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 632.735019] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.075s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.735361] env[63538]: INFO nova.compute.claims [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 632.738102] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ce15604-2cc0-4240-96d8-56630b591341 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.762060] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Waiting for the task: (returnval){ [ 632.762060] env[63538]: value = "task-5100507" [ 632.762060] env[63538]: _type = "Task" [ 632.762060] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.776852] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100507, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.844271] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.003036] env[63538]: DEBUG nova.network.neutron [req-cb2fed20-8e1b-40be-a678-50e624a68950 req-1f91f9c2-0f60-4032-ac2f-38e65d0a86cd service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Updated VIF entry in instance network info cache for port 4fd02a9a-ba01-4841-a942-ca1b96503c0e. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 633.003901] env[63538]: DEBUG nova.network.neutron [req-cb2fed20-8e1b-40be-a678-50e624a68950 req-1f91f9c2-0f60-4032-ac2f-38e65d0a86cd service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Updating instance_info_cache with network_info: [{"id": "4fd02a9a-ba01-4841-a942-ca1b96503c0e", "address": "fa:16:3e:b7:48:f2", "network": {"id": "22f4bf41-624c-46c3-8d4b-9e72e652bacb", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-829996620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "540ca40795e54cfb8e38e203bba99ba0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fd02a9a-ba", "ovs_interfaceid": "4fd02a9a-ba01-4841-a942-ca1b96503c0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.022830] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100506, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.235159] env[63538]: DEBUG nova.compute.utils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 633.238278] env[63538]: DEBUG nova.compute.manager [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 633.238529] env[63538]: DEBUG nova.network.neutron [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 633.281309] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100507, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.511467] env[63538]: DEBUG oslo_concurrency.lockutils [req-cb2fed20-8e1b-40be-a678-50e624a68950 req-1f91f9c2-0f60-4032-ac2f-38e65d0a86cd service nova] Releasing lock "refresh_cache-102c0463-fb64-4dda-914c-b98c8e9991ad" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.525365] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100506, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.631982] env[63538]: DEBUG nova.policy [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f91d4ce5a5724fb7b785591ae831506d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c1f0c999ede418c866074d9276050ff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 633.746437] env[63538]: DEBUG nova.compute.manager [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 633.776554] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100507, 'name': ReconfigVM_Task, 'duration_secs': 0.947996} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.776882] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b/7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 633.778260] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0897f3d0-af32-4b50-bb9a-ed56d7334e5f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.787238] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Waiting for the task: (returnval){ [ 633.787238] env[63538]: value = "task-5100508" [ 633.787238] env[63538]: _type = "Task" [ 633.787238] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.804442] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100508, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.026686] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100506, 'name': ReconfigVM_Task, 'duration_secs': 1.195284} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.026686] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Reconfigured VM instance instance-00000009 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 634.030480] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6fbc0c-f2a9-4d0f-9409-c70c17e14877 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.063930] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 2e1b0bc7-3909-48e2-b9be-26822a57ee67/2e1b0bc7-3909-48e2-b9be-26822a57ee67.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 634.068053] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9746f51a-6d60-44c9-ad06-6ed931f1396e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.095690] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 634.095690] env[63538]: value = "task-5100509" [ 634.095690] env[63538]: _type = "Task" [ 634.095690] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.110432] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100509, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.173547] env[63538]: DEBUG nova.compute.manager [req-7a56cdad-e950-4695-a5ed-d77bc0096a27 req-10f6a78d-ce32-497d-9f81-6d929d2dcc7c service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Received event network-changed-16b1a07f-5af3-4a11-967b-acc2df708c1d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 634.173767] env[63538]: DEBUG nova.compute.manager [req-7a56cdad-e950-4695-a5ed-d77bc0096a27 req-10f6a78d-ce32-497d-9f81-6d929d2dcc7c service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Refreshing instance network info cache due to event network-changed-16b1a07f-5af3-4a11-967b-acc2df708c1d. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 634.173978] env[63538]: DEBUG oslo_concurrency.lockutils [req-7a56cdad-e950-4695-a5ed-d77bc0096a27 req-10f6a78d-ce32-497d-9f81-6d929d2dcc7c service nova] Acquiring lock "refresh_cache-543875b5-195a-476d-a0b4-3211ceefa27f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.178155] env[63538]: DEBUG oslo_concurrency.lockutils [req-7a56cdad-e950-4695-a5ed-d77bc0096a27 req-10f6a78d-ce32-497d-9f81-6d929d2dcc7c service nova] Acquired lock "refresh_cache-543875b5-195a-476d-a0b4-3211ceefa27f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.178155] env[63538]: DEBUG nova.network.neutron [req-7a56cdad-e950-4695-a5ed-d77bc0096a27 req-10f6a78d-ce32-497d-9f81-6d929d2dcc7c service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Refreshing network info cache for port 16b1a07f-5af3-4a11-967b-acc2df708c1d {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 634.303152] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100508, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.430907] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5cd898-385d-49e7-bb76-4aaa5bfb5372 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.441847] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8965001-92ba-49e8-8eaf-80eb63f15ccc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.485020] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75552f9-9f3c-49a6-b26c-4bcf4e6ce4b4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.500661] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbd1251-53dc-4fe3-9e08-2aa4e8115646 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.519820] env[63538]: DEBUG nova.compute.provider_tree [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.618182] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100509, 'name': ReconfigVM_Task, 'duration_secs': 0.298247} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.618182] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 2e1b0bc7-3909-48e2-b9be-26822a57ee67/2e1b0bc7-3909-48e2-b9be-26822a57ee67.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 634.618182] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance '2e1b0bc7-3909-48e2-b9be-26822a57ee67' progress to 50 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 634.754501] env[63538]: DEBUG nova.network.neutron [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Successfully created port: 2b33d2bc-399c-4a17-826e-f6425766c6fd {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 634.757738] env[63538]: DEBUG nova.compute.manager [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 634.802580] env[63538]: DEBUG nova.virt.hardware [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 634.802922] env[63538]: DEBUG nova.virt.hardware [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 634.803036] env[63538]: DEBUG nova.virt.hardware [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 634.803256] env[63538]: DEBUG nova.virt.hardware [None req-37f090b3-5011-4f44-9299-3009ce841e4f 
tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 634.803508] env[63538]: DEBUG nova.virt.hardware [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 634.803741] env[63538]: DEBUG nova.virt.hardware [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 634.804031] env[63538]: DEBUG nova.virt.hardware [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 634.804229] env[63538]: DEBUG nova.virt.hardware [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 634.804390] env[63538]: DEBUG nova.virt.hardware [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 634.804583] env[63538]: DEBUG nova.virt.hardware [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 634.804831] env[63538]: DEBUG nova.virt.hardware [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 634.806534] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafb13f0-5b19-4fb8-ad5e-16837ccb627c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.814206] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100508, 'name': Rename_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.823198] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019d5db5-d786-4279-83b5-861d0bf8c408 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.026312] env[63538]: DEBUG nova.scheduler.client.report [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 635.126737] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848dd768-fbe5-4c43-9f23-4f26ff3f4477 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.146767] env[63538]: DEBUG nova.network.neutron [req-7a56cdad-e950-4695-a5ed-d77bc0096a27 req-10f6a78d-ce32-497d-9f81-6d929d2dcc7c service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Updated VIF entry in instance network info cache for port 16b1a07f-5af3-4a11-967b-acc2df708c1d. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 635.146767] env[63538]: DEBUG nova.network.neutron [req-7a56cdad-e950-4695-a5ed-d77bc0096a27 req-10f6a78d-ce32-497d-9f81-6d929d2dcc7c service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Updating instance_info_cache with network_info: [{"id": "16b1a07f-5af3-4a11-967b-acc2df708c1d", "address": "fa:16:3e:e4:04:09", "network": {"id": "93c6d210-dcf5-4f42-a7de-20b9e4f21717", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-124766487-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81348a2052934087a4b147aad4e7eb39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb3425ea-72e7-41e3-92a7-820db9ec4661", "external-id": "nsx-vlan-transportzone-651", "segmentation_id": 651, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16b1a07f-5a", "ovs_interfaceid": "16b1a07f-5af3-4a11-967b-acc2df708c1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.175051] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca10f77-0759-4d3a-8986-b4f1f262f687 {{(pid=63538) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.197587] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance '2e1b0bc7-3909-48e2-b9be-26822a57ee67' progress to 67 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 635.308294] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100508, 'name': Rename_Task, 'duration_secs': 1.037889} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.308976] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 635.309293] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d8eb55c-f5b0-4f84-91d8-db06723ac81c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.321759] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Waiting for the task: (returnval){ [ 635.321759] env[63538]: value = "task-5100510" [ 635.321759] env[63538]: _type = "Task" [ 635.321759] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.337301] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100510, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.535060] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.802s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.535656] env[63538]: DEBUG nova.compute.manager [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 635.539652] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.323s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.539901] env[63538]: DEBUG nova.objects.instance [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Lazy-loading 'resources' on Instance uuid e3ba860b-afb8-4843-9d99-049dce205f9f {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 635.652677] env[63538]: DEBUG oslo_concurrency.lockutils [req-7a56cdad-e950-4695-a5ed-d77bc0096a27 req-10f6a78d-ce32-497d-9f81-6d929d2dcc7c service nova] Releasing lock "refresh_cache-543875b5-195a-476d-a0b4-3211ceefa27f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.803129] env[63538]: DEBUG nova.network.neutron [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Port 47d19b83-6292-46e2-835f-1198ef52374c binding to destination host cpu-1 is already ACTIVE {{(pid=63538) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 635.839987] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100510, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.041667] env[63538]: DEBUG nova.compute.utils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 636.044045] env[63538]: DEBUG nova.compute.manager [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 636.044045] env[63538]: DEBUG nova.network.neutron [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 636.129030] env[63538]: DEBUG nova.policy [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e81b8e6639bb436ca91e68b2e7248f92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c5e6ed681ed4078bd9115b30f419d9a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 636.478902] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100510, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.568384] env[63538]: DEBUG nova.compute.manager [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 636.838345] env[63538]: DEBUG oslo_vmware.api [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100510, 'name': PowerOnVM_Task, 'duration_secs': 1.039331} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.838736] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 636.838813] env[63538]: INFO nova.compute.manager [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Took 12.10 seconds to spawn the instance on the hypervisor. 
[ 636.838992] env[63538]: DEBUG nova.compute.manager [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 636.839828] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a9c52d-c05e-4130-81bf-d7d1cc95d0a5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.856957] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e33a51e-8206-4efc-bc11-2a722ecd7446 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.866905] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b0eac0-eb07-41de-953e-6e3b86d7e5ae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.901944] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5302a4-85ed-4719-bcf3-fb1253394f9f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.912232] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1316697-fa10-451d-8dc5-9364661a57a5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.932366] env[63538]: DEBUG nova.compute.provider_tree [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.998006] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.998006] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.998237] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.140361] env[63538]: DEBUG nova.network.neutron [None 
req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Successfully created port: 6768804b-2e12-482a-b0c9-b886eaee9afb {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 637.362096] env[63538]: INFO nova.compute.manager [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Took 36.81 seconds to build instance. [ 637.436404] env[63538]: DEBUG nova.scheduler.client.report [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 637.450631] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "61068d41-5f5d-4ee5-b546-71da13eff93d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.450631] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "61068d41-5f5d-4ee5-b546-71da13eff93d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.562881] env[63538]: DEBUG nova.compute.manager [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 637.594311] env[63538]: DEBUG nova.virt.hardware [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 637.594900] env[63538]: DEBUG nova.virt.hardware [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 637.594900] env[63538]: DEBUG nova.virt.hardware [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 637.595063] env[63538]: DEBUG nova.virt.hardware [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 637.595590] env[63538]: DEBUG nova.virt.hardware [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 637.595782] env[63538]: DEBUG nova.virt.hardware [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 637.596030] env[63538]: DEBUG nova.virt.hardware [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 637.596209] env[63538]: DEBUG nova.virt.hardware [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 637.596715] env[63538]: DEBUG nova.virt.hardware [None 
req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 637.596909] env[63538]: DEBUG nova.virt.hardware [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 637.597205] env[63538]: DEBUG nova.virt.hardware [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 637.598388] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0acbbccc-c53d-49a9-aa4b-7ca34caf2b85 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.609490] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f383a5-8937-46ee-9156-1b9f8b587392 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.847627] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.847923] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.864237] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccdb6eb4-2150-4212-9515-93c7fc019b21 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Lock "7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.389s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.950477] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.411s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.956708] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 
tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.106s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.958637] env[63538]: INFO nova.compute.claims [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.998790] env[63538]: INFO nova.scheduler.client.report [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Deleted allocations for instance e3ba860b-afb8-4843-9d99-049dce205f9f [ 638.089225] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.089593] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.089809] env[63538]: DEBUG nova.network.neutron [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 638.099251] env[63538]: DEBUG nova.network.neutron [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Successfully updated port: 2b33d2bc-399c-4a17-826e-f6425766c6fd {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 638.144270] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquiring lock "fd650fdc-6b49-4051-8267-bbd1f0cb86f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.144270] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Lock "fd650fdc-6b49-4051-8267-bbd1f0cb86f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.368836] env[63538]: DEBUG nova.compute.manager [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 
43729260-d138-4e62-9cc5-4db3ca39f5d2] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 638.519753] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b5c7ea09-b406-4412-94c6-4c1eb2699dcb tempest-TenantUsagesTestJSON-987109940 tempest-TenantUsagesTestJSON-987109940-project-member] Lock "e3ba860b-afb8-4843-9d99-049dce205f9f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.831s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.603081] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "refresh_cache-bd222761-92aa-4f2c-a752-ead9c498ee7a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.603081] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "refresh_cache-bd222761-92aa-4f2c-a752-ead9c498ee7a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.603081] env[63538]: DEBUG nova.network.neutron [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 638.902466] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.081066] env[63538]: DEBUG nova.network.neutron [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance_info_cache with network_info: [{"id": "47d19b83-6292-46e2-835f-1198ef52374c", "address": "fa:16:3e:af:6f:01", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d19b83-62", "ovs_interfaceid": "47d19b83-6292-46e2-835f-1198ef52374c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.158947] env[63538]: DEBUG nova.network.neutron [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 639.195626] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.195626] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.413739] env[63538]: DEBUG nova.compute.manager [req-66413065-bdd1-4d1a-a2a0-72a1bb44f131 req-ca36680e-8d8e-4326-8be5-5666745d0520 service nova] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Received event network-vif-plugged-2b33d2bc-399c-4a17-826e-f6425766c6fd {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 639.413939] env[63538]: DEBUG oslo_concurrency.lockutils [req-66413065-bdd1-4d1a-a2a0-72a1bb44f131 req-ca36680e-8d8e-4326-8be5-5666745d0520 service nova] Acquiring lock "bd222761-92aa-4f2c-a752-ead9c498ee7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.414148] env[63538]: DEBUG oslo_concurrency.lockutils [req-66413065-bdd1-4d1a-a2a0-72a1bb44f131 req-ca36680e-8d8e-4326-8be5-5666745d0520 service nova] Lock "bd222761-92aa-4f2c-a752-ead9c498ee7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.414489] env[63538]: DEBUG oslo_concurrency.lockutils [req-66413065-bdd1-4d1a-a2a0-72a1bb44f131 req-ca36680e-8d8e-4326-8be5-5666745d0520 service nova] Lock "bd222761-92aa-4f2c-a752-ead9c498ee7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.414489] env[63538]: DEBUG nova.compute.manager [req-66413065-bdd1-4d1a-a2a0-72a1bb44f131 req-ca36680e-8d8e-4326-8be5-5666745d0520 service nova] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] No waiting events found dispatching network-vif-plugged-2b33d2bc-399c-4a17-826e-f6425766c6fd {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 639.414595] env[63538]: WARNING nova.compute.manager [req-66413065-bdd1-4d1a-a2a0-72a1bb44f131 req-ca36680e-8d8e-4326-8be5-5666745d0520 service nova] [instance: 
bd222761-92aa-4f2c-a752-ead9c498ee7a] Received unexpected event network-vif-plugged-2b33d2bc-399c-4a17-826e-f6425766c6fd for instance with vm_state building and task_state spawning. [ 639.554309] env[63538]: DEBUG nova.network.neutron [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Updating instance_info_cache with network_info: [{"id": "2b33d2bc-399c-4a17-826e-f6425766c6fd", "address": "fa:16:3e:98:e9:0f", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b33d2bc-39", "ovs_interfaceid": "2b33d2bc-399c-4a17-826e-f6425766c6fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.557818] env[63538]: DEBUG nova.compute.manager [req-ce8f4757-02d0-44b6-b558-b144dfbd12de req-9abd365d-89cf-4fd9-9987-650e92ce1734 service nova] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Received event network-changed-2b33d2bc-399c-4a17-826e-f6425766c6fd {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 639.558029] env[63538]: DEBUG nova.compute.manager [req-ce8f4757-02d0-44b6-b558-b144dfbd12de req-9abd365d-89cf-4fd9-9987-650e92ce1734 service nova] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Refreshing instance network info cache due to event network-changed-2b33d2bc-399c-4a17-826e-f6425766c6fd. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 639.558341] env[63538]: DEBUG oslo_concurrency.lockutils [req-ce8f4757-02d0-44b6-b558-b144dfbd12de req-9abd365d-89cf-4fd9-9987-650e92ce1734 service nova] Acquiring lock "refresh_cache-bd222761-92aa-4f2c-a752-ead9c498ee7a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.588985] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.804403] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa35fd6-a58c-4e46-b480-cb052dc733e2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.815198] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c880417d-fa2c-424c-a6b8-4b52f2453f0a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.852285] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4242e74b-13b1-42fb-a603-b2927fd2bfa1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.862154] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc008d6c-bede-4daf-9318-bfdcd4c8d9e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.878849] env[63538]: DEBUG nova.compute.provider_tree [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.945355] env[63538]: DEBUG oslo_concurrency.lockutils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Acquiring lock "bb56950a-3e25-4fb9-9f84-f735e26adc42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.945355] env[63538]: DEBUG oslo_concurrency.lockutils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Lock "bb56950a-3e25-4fb9-9f84-f735e26adc42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.955651] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Acquiring lock "080b11d7-a756-45a0-81d5-b5fcc2662ac9" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.955876] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Lock "080b11d7-a756-45a0-81d5-b5fcc2662ac9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.981278] env[63538]: DEBUG nova.network.neutron [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Successfully updated port: 6768804b-2e12-482a-b0c9-b886eaee9afb {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 640.060155] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "refresh_cache-bd222761-92aa-4f2c-a752-ead9c498ee7a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.060521] env[63538]: DEBUG nova.compute.manager [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Instance network_info: |[{"id": "2b33d2bc-399c-4a17-826e-f6425766c6fd", "address": "fa:16:3e:98:e9:0f", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b33d2bc-39", "ovs_interfaceid": "2b33d2bc-399c-4a17-826e-f6425766c6fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 640.060838] env[63538]: DEBUG oslo_concurrency.lockutils [req-ce8f4757-02d0-44b6-b558-b144dfbd12de req-9abd365d-89cf-4fd9-9987-650e92ce1734 service nova] Acquired lock "refresh_cache-bd222761-92aa-4f2c-a752-ead9c498ee7a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.061913] env[63538]: DEBUG nova.network.neutron [req-ce8f4757-02d0-44b6-b558-b144dfbd12de req-9abd365d-89cf-4fd9-9987-650e92ce1734 service nova] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Refreshing network info cache for port 
2b33d2bc-399c-4a17-826e-f6425766c6fd {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 640.062551] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:e9:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6edb8eae-1113-49d0-84f7-9fd9f82b26fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2b33d2bc-399c-4a17-826e-f6425766c6fd', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.069898] env[63538]: DEBUG oslo.service.loopingcall [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 640.070923] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 640.071566] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-876e94fe-0d4a-4aac-aa25-ce838405895c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.096021] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.096021] env[63538]: value = "task-5100511" [ 640.096021] env[63538]: _type = "Task" [ 640.096021] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.107805] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100511, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.131066] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bcdb91f-0f2c-4f39-b34a-8da1bb097f8b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.151238] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6674d4-7fba-4704-acbd-9b8ade5576fa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.160492] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance '2e1b0bc7-3909-48e2-b9be-26822a57ee67' progress to 83 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 640.389241] env[63538]: DEBUG nova.scheduler.client.report [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 640.486299] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "refresh_cache-c065263a-fd40-4b44-a68e-0e03248d0bc0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.486299] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired lock "refresh_cache-c065263a-fd40-4b44-a68e-0e03248d0bc0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.486299] env[63538]: DEBUG nova.network.neutron [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 640.607899] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100511, 'name': CreateVM_Task, 'duration_secs': 0.380578} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.608090] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 640.608791] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.608944] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.609292] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 640.610430] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eaa70870-5e94-4f02-b700-67b510f06606 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.615887] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 640.615887] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52167d73-f10f-4832-0c9f-126ba1a65e6f" [ 640.615887] env[63538]: _type = "Task" [ 640.615887] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.628858] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52167d73-f10f-4832-0c9f-126ba1a65e6f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.638429] env[63538]: DEBUG nova.compute.manager [req-21e86c71-bad1-465e-afbd-a0c1b96908d0 req-9af8938f-b61b-49a9-a7f3-4a3a73e97533 service nova] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Received event network-vif-plugged-6768804b-2e12-482a-b0c9-b886eaee9afb {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 640.638429] env[63538]: DEBUG oslo_concurrency.lockutils [req-21e86c71-bad1-465e-afbd-a0c1b96908d0 req-9af8938f-b61b-49a9-a7f3-4a3a73e97533 service nova] Acquiring lock "c065263a-fd40-4b44-a68e-0e03248d0bc0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.638429] env[63538]: DEBUG oslo_concurrency.lockutils [req-21e86c71-bad1-465e-afbd-a0c1b96908d0 req-9af8938f-b61b-49a9-a7f3-4a3a73e97533 service nova] Lock "c065263a-fd40-4b44-a68e-0e03248d0bc0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.638429] env[63538]: DEBUG oslo_concurrency.lockutils [req-21e86c71-bad1-465e-afbd-a0c1b96908d0 req-9af8938f-b61b-49a9-a7f3-4a3a73e97533 service nova] Lock "c065263a-fd40-4b44-a68e-0e03248d0bc0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.638429] env[63538]: DEBUG nova.compute.manager [req-21e86c71-bad1-465e-afbd-a0c1b96908d0 req-9af8938f-b61b-49a9-a7f3-4a3a73e97533 service nova] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] No waiting events found dispatching network-vif-plugged-6768804b-2e12-482a-b0c9-b886eaee9afb {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 640.638731] env[63538]: WARNING nova.compute.manager [req-21e86c71-bad1-465e-afbd-a0c1b96908d0 req-9af8938f-b61b-49a9-a7f3-4a3a73e97533 service nova] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Received unexpected event network-vif-plugged-6768804b-2e12-482a-b0c9-b886eaee9afb for instance with vm_state building and task_state spawning. [ 640.676817] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 640.676817] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c9fa76b-d054-432f-81e3-9377bb1e8243 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.684960] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 640.684960] env[63538]: value = "task-5100512" [ 640.684960] env[63538]: _type = "Task" [ 640.684960] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.705539] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100512, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.899196] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.945s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.900095] env[63538]: DEBUG nova.compute.manager [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 640.902462] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.392s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.902708] env[63538]: DEBUG nova.objects.instance [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Lazy-loading 'resources' on Instance uuid 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 641.088705] env[63538]: DEBUG nova.network.neutron [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 641.130789] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52167d73-f10f-4832-0c9f-126ba1a65e6f, 'name': SearchDatastore_Task, 'duration_secs': 0.018538} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.130789] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.130789] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 641.130789] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.131101] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.131101] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 641.131101] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93d3a796-c393-4930-8a16-5fbd1b9e0efd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.140781] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 641.140976] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 641.141737] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd120e6f-74c0-458f-8bc2-c0ad4f7f005c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.145944] env[63538]: DEBUG nova.network.neutron [req-ce8f4757-02d0-44b6-b558-b144dfbd12de req-9abd365d-89cf-4fd9-9987-650e92ce1734 service nova] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Updated VIF entry in instance network info cache for port 2b33d2bc-399c-4a17-826e-f6425766c6fd. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 641.146605] env[63538]: DEBUG nova.network.neutron [req-ce8f4757-02d0-44b6-b558-b144dfbd12de req-9abd365d-89cf-4fd9-9987-650e92ce1734 service nova] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Updating instance_info_cache with network_info: [{"id": "2b33d2bc-399c-4a17-826e-f6425766c6fd", "address": "fa:16:3e:98:e9:0f", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b33d2bc-39", "ovs_interfaceid": "2b33d2bc-399c-4a17-826e-f6425766c6fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.151899] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 641.151899] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5227c23c-f396-a385-b56c-b36af2d19358" [ 641.151899] env[63538]: _type = "Task" [ 641.151899] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.165668] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5227c23c-f396-a385-b56c-b36af2d19358, 'name': SearchDatastore_Task, 'duration_secs': 0.009968} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.166704] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1816a84-93b3-4159-8e17-55ab5418ff28 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.175022] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 641.175022] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5255766a-b382-62d9-159b-d37fb02ccab0" [ 641.175022] env[63538]: _type = "Task" [ 641.175022] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.183295] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5255766a-b382-62d9-159b-d37fb02ccab0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.195884] env[63538]: DEBUG oslo_vmware.api [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100512, 'name': PowerOnVM_Task, 'duration_secs': 0.431245} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.195884] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 641.195884] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-aa434cec-a1e8-4b83-a69c-b4e61e4b61fc tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance '2e1b0bc7-3909-48e2-b9be-26822a57ee67' progress to 100 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 641.406174] env[63538]: DEBUG nova.compute.utils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 641.407696] env[63538]: DEBUG nova.compute.manager [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 641.408717] env[63538]: DEBUG nova.network.neutron [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 641.440270] env[63538]: DEBUG nova.network.neutron [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Updating instance_info_cache with network_info: [{"id": "6768804b-2e12-482a-b0c9-b886eaee9afb", "address": "fa:16:3e:01:87:f1", "network": {"id": "4fd42fb1-fcc5-467d-88b7-1a5ab3297631", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-9303923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c5e6ed681ed4078bd9115b30f419d9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6768804b-2e", "ovs_interfaceid": "6768804b-2e12-482a-b0c9-b886eaee9afb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.614810] env[63538]: DEBUG nova.policy [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9873cd990aba452b8b9ef7a8b67f7f53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd7a98c2190944e4284f2c4f02cee8ca2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 641.649235] env[63538]: DEBUG oslo_concurrency.lockutils [req-ce8f4757-02d0-44b6-b558-b144dfbd12de req-9abd365d-89cf-4fd9-9987-650e92ce1734 service nova] Releasing lock "refresh_cache-bd222761-92aa-4f2c-a752-ead9c498ee7a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.693396] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5255766a-b382-62d9-159b-d37fb02ccab0, 'name': SearchDatastore_Task, 'duration_secs': 0.009735} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.693579] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.694307] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] bd222761-92aa-4f2c-a752-ead9c498ee7a/bd222761-92aa-4f2c-a752-ead9c498ee7a.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 641.694783] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ccb384d3-e7b4-4968-a1c3-eabc85a11cef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.711795] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 641.711795] env[63538]: value = "task-5100513" [ 641.711795] env[63538]: _type = "Task" [ 641.711795] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.719488] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100513, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.911698] env[63538]: DEBUG nova.compute.manager [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 641.943913] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Releasing lock "refresh_cache-c065263a-fd40-4b44-a68e-0e03248d0bc0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.944313] env[63538]: DEBUG nova.compute.manager [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Instance network_info: |[{"id": "6768804b-2e12-482a-b0c9-b886eaee9afb", "address": "fa:16:3e:01:87:f1", "network": {"id": "4fd42fb1-fcc5-467d-88b7-1a5ab3297631", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-9303923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c5e6ed681ed4078bd9115b30f419d9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6768804b-2e", "ovs_interfaceid": "6768804b-2e12-482a-b0c9-b886eaee9afb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 641.944797] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:87:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b399c74-1411-408a-b4cd-84e268ae83fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6768804b-2e12-482a-b0c9-b886eaee9afb', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 641.958477] env[63538]: DEBUG oslo.service.loopingcall [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 641.963223] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 641.963764] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46ce0e36-57ed-4522-8aa7-62b3908c60af {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.997026] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 641.997026] env[63538]: value = "task-5100514" [ 641.997026] env[63538]: _type = "Task" [ 641.997026] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.008584] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100514, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.026916] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1af6795-c65b-4c4c-ace5-9e01da09ebd6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.037169] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61aeac79-335f-4af4-b0bd-fe133b18f4e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.077343] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8f2bb3-940f-4864-9d1e-1bb6c5abb859 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.088748] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f6f860-21db-48e8-aad2-ee93169fc2fc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.105275] env[63538]: DEBUG nova.compute.provider_tree [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.224484] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100513, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.483641] env[63538]: DEBUG nova.network.neutron [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Successfully created port: 204795e5-710d-4501-95e6-1353e467aded {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 642.507483] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100514, 'name': CreateVM_Task, 'duration_secs': 0.412849} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.507786] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 642.508379] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.508625] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.508935] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 642.509276] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5f8e705-0d5e-4163-86ff-5e024a26d161 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.515566] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 642.515566] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526b51a4-461f-be50-f357-a4844c852ed7" [ 642.515566] env[63538]: _type = "Task" [ 642.515566] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.525569] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526b51a4-461f-be50-f357-a4844c852ed7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.608682] env[63538]: DEBUG nova.scheduler.client.report [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 642.741305] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100513, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.589583} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.742521] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] bd222761-92aa-4f2c-a752-ead9c498ee7a/bd222761-92aa-4f2c-a752-ead9c498ee7a.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 642.742879] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 642.743236] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55927431-f484-44e3-89a4-cc1c3401c566 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.754882] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 642.754882] env[63538]: value = "task-5100515" [ 642.754882] env[63538]: _type = "Task" [ 642.754882] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.765698] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100515, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.887870] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Acquiring lock "4e89aa25-fb4a-430d-ab87-feff57b73780" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.888739] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lock "4e89aa25-fb4a-430d-ab87-feff57b73780" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.921843] env[63538]: DEBUG nova.compute.manager [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 642.955225] env[63538]: DEBUG nova.virt.hardware [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 642.955646] env[63538]: DEBUG nova.virt.hardware [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 642.955917] env[63538]: DEBUG nova.virt.hardware [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 642.956272] env[63538]: DEBUG nova.virt.hardware [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 642.956535] env[63538]: DEBUG nova.virt.hardware [None 
req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 642.956801] env[63538]: DEBUG nova.virt.hardware [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 642.957250] env[63538]: DEBUG nova.virt.hardware [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 642.957560] env[63538]: DEBUG nova.virt.hardware [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 642.957865] env[63538]: DEBUG nova.virt.hardware [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 642.958169] env[63538]: DEBUG nova.virt.hardware [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 642.958494] env[63538]: DEBUG nova.virt.hardware [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 642.960887] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0e09b3-6abf-4f7d-9960-0c67787fe2a1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.974432] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f495e14-9a71-4111-92cf-707fcd2becb0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.032150] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526b51a4-461f-be50-f357-a4844c852ed7, 'name': SearchDatastore_Task, 'duration_secs': 0.010158} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.032568] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.032943] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 643.033253] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.033400] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.033579] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 643.033871] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3eba5a29-bdc3-47a5-8c99-3b885ef03699 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.046089] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 643.046089] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 643.046089] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4c247ee-4663-4b5b-86b8-091ee39b78b6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.056167] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 643.056167] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52567db0-39fc-d664-14c4-18cacc4e4fc9" [ 643.056167] env[63538]: _type = "Task" [ 643.056167] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.066550] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52567db0-39fc-d664-14c4-18cacc4e4fc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.117018] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.213s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.118159] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.300s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.119562] env[63538]: INFO nova.compute.claims [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 643.190783] env[63538]: INFO nova.scheduler.client.report [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Deleted allocations for instance 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf [ 643.242490] env[63538]: DEBUG nova.compute.manager [req-56cfb458-7bec-4b72-bfe7-fb3fded8da6c req-07210f7f-885f-41f8-995c-504d0e0ad0b5 service nova] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Received event network-changed-6768804b-2e12-482a-b0c9-b886eaee9afb {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 643.242490] env[63538]: DEBUG nova.compute.manager [req-56cfb458-7bec-4b72-bfe7-fb3fded8da6c req-07210f7f-885f-41f8-995c-504d0e0ad0b5 service nova] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Refreshing instance network info cache due to event network-changed-6768804b-2e12-482a-b0c9-b886eaee9afb. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 643.242895] env[63538]: DEBUG oslo_concurrency.lockutils [req-56cfb458-7bec-4b72-bfe7-fb3fded8da6c req-07210f7f-885f-41f8-995c-504d0e0ad0b5 service nova] Acquiring lock "refresh_cache-c065263a-fd40-4b44-a68e-0e03248d0bc0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.242895] env[63538]: DEBUG oslo_concurrency.lockutils [req-56cfb458-7bec-4b72-bfe7-fb3fded8da6c req-07210f7f-885f-41f8-995c-504d0e0ad0b5 service nova] Acquired lock "refresh_cache-c065263a-fd40-4b44-a68e-0e03248d0bc0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.243552] env[63538]: DEBUG nova.network.neutron [req-56cfb458-7bec-4b72-bfe7-fb3fded8da6c req-07210f7f-885f-41f8-995c-504d0e0ad0b5 service nova] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Refreshing network info cache for port 6768804b-2e12-482a-b0c9-b886eaee9afb {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 643.274359] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100515, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074233} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.274603] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 643.275658] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca2c51a-9dbc-420b-83be-59174ac4aee0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.307470] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] bd222761-92aa-4f2c-a752-ead9c498ee7a/bd222761-92aa-4f2c-a752-ead9c498ee7a.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 643.308212] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42f99180-8a76-42e5-9d67-8ffa5edea00f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.339356] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 643.339356] env[63538]: value = "task-5100516" [ 643.339356] env[63538]: _type = "Task" [ 643.339356] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.353619] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100516, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.564633] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52567db0-39fc-d664-14c4-18cacc4e4fc9, 'name': SearchDatastore_Task, 'duration_secs': 0.011261} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.565681] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebcef16e-c495-48a3-b2eb-474f2eeef36e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.572829] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 643.572829] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a3cf47-22dd-e7f0-1fa0-281589e883dd" [ 643.572829] env[63538]: _type = "Task" [ 643.572829] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.587598] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a3cf47-22dd-e7f0-1fa0-281589e883dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.666677] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d93de4e6-73dc-4bbf-b478-65d61266f365 tempest-InstanceActionsNegativeTestJSON-1496350076 tempest-InstanceActionsNegativeTestJSON-1496350076-project-member] Lock "10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.075s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.852272] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100516, 'name': ReconfigVM_Task, 'duration_secs': 0.4971} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.852272] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Reconfigured VM instance instance-00000015 to attach disk [datastore2] bd222761-92aa-4f2c-a752-ead9c498ee7a/bd222761-92aa-4f2c-a752-ead9c498ee7a.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 643.852865] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67dff4da-58be-45c4-bbc6-1fe7e5ac8a03 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.861331] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 643.861331] env[63538]: value = "task-5100517" [ 643.861331] env[63538]: _type = "Task" [ 643.861331] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.874539] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100517, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.086831] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a3cf47-22dd-e7f0-1fa0-281589e883dd, 'name': SearchDatastore_Task, 'duration_secs': 0.011249} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.087151] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.087477] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] c065263a-fd40-4b44-a68e-0e03248d0bc0/c065263a-fd40-4b44-a68e-0e03248d0bc0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 644.087687] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5317fdfa-6e45-4e4f-8317-a393e473f9a0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.099579] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 644.099579] env[63538]: value = "task-5100518" [ 644.099579] env[63538]: _type = "Task" [ 644.099579] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.110514] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100518, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.376465] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100517, 'name': Rename_Task, 'duration_secs': 0.34583} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.381278] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 644.382177] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71ff6d11-44cd-4be7-b6fc-aa49f6291e08 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.391239] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 644.391239] env[63538]: value = "task-5100519" [ 644.391239] env[63538]: _type = "Task" [ 644.391239] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.409400] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100519, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.508742] env[63538]: DEBUG nova.network.neutron [req-56cfb458-7bec-4b72-bfe7-fb3fded8da6c req-07210f7f-885f-41f8-995c-504d0e0ad0b5 service nova] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Updated VIF entry in instance network info cache for port 6768804b-2e12-482a-b0c9-b886eaee9afb. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 644.509058] env[63538]: DEBUG nova.network.neutron [req-56cfb458-7bec-4b72-bfe7-fb3fded8da6c req-07210f7f-885f-41f8-995c-504d0e0ad0b5 service nova] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Updating instance_info_cache with network_info: [{"id": "6768804b-2e12-482a-b0c9-b886eaee9afb", "address": "fa:16:3e:01:87:f1", "network": {"id": "4fd42fb1-fcc5-467d-88b7-1a5ab3297631", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-9303923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c5e6ed681ed4078bd9115b30f419d9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6768804b-2e", "ovs_interfaceid": "6768804b-2e12-482a-b0c9-b886eaee9afb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.613119] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100518, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.742100] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Acquiring lock "736b110e-7265-42cc-9c9b-35f57c466b0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.742324] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Lock "736b110e-7265-42cc-9c9b-35f57c466b0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.845075] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.845229] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.845414] env[63538]: DEBUG nova.compute.manager [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Going to confirm migration 1 {{(pid=63538) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 644.903131] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c07afe-2c92-4d63-b525-b65b38ab75c9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.914423] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f6885e-f447-4211-9d8a-aa39e35e45ed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.919698] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100519, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.949964] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f14bd8-a1fa-4ad3-b95e-ee7b6d211738 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.961098] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfb1f9a-625e-404b-b600-4ccff16ef17f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.975737] env[63538]: DEBUG nova.compute.provider_tree [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.019475] env[63538]: DEBUG oslo_concurrency.lockutils [req-56cfb458-7bec-4b72-bfe7-fb3fded8da6c req-07210f7f-885f-41f8-995c-504d0e0ad0b5 service nova] Releasing lock "refresh_cache-c065263a-fd40-4b44-a68e-0e03248d0bc0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.112082] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100518, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.621576} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.112347] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] c065263a-fd40-4b44-a68e-0e03248d0bc0/c065263a-fd40-4b44-a68e-0e03248d0bc0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 645.112904] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 645.112904] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42bfc011-7f8a-4dab-942a-5e268f3e7f62 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.125944] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 645.125944] env[63538]: value = "task-5100520" [ 645.125944] env[63538]: _type = "Task" [ 645.125944] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.139601] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100520, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.397121] env[63538]: DEBUG nova.network.neutron [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Successfully updated port: 204795e5-710d-4501-95e6-1353e467aded {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 645.414894] env[63538]: DEBUG oslo_vmware.api [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100519, 'name': PowerOnVM_Task, 'duration_secs': 0.968258} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.415130] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 645.415338] env[63538]: INFO nova.compute.manager [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Took 10.66 seconds to spawn the instance on the hypervisor. 
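The repeated "Waiting for the task: (returnval){ ... }" and "progress is N%" entries above are oslo.vmware's task-polling pattern: the VMware driver submits a vCenter *_Task call (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), then blocks in wait_for_task(), which polls the task object until it reports success or error and emits the progress lines seen here. The following is a minimal illustrative sketch of that pattern, not code from this deployment; the host, credentials, vm_ref, and the exact VMwareAPISession argument order are assumptions based on oslo.vmware's public API and may differ by version.

    # Illustrative sketch of the submit-task / wait_for_task pattern in the log above.
    # Host, credentials and vm_ref are placeholders, not values from this log.
    from oslo_vmware import api

    def power_on(vm_ref):
        # vm_ref: a VirtualMachine ManagedObjectReference obtained elsewhere (assumed).
        session = api.VMwareAPISession(
            'vc.example.test',              # vCenter host (placeholder)
            'administrator@vsphere.local',  # username (placeholder)
            'secret',                       # password (placeholder)
            10,                             # api_retry_count
            0.5,                            # task_poll_interval, seconds between polls
        )
        # The *_Task call returns a Task reference immediately ...
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # ... and wait_for_task() polls it, returning the TaskInfo on success or
        # raising an oslo_vmware exception if the task ends in an error state.
        return session.wait_for_task(task_ref)

Similarly, the Acquiring/Acquired/Releasing lock lines around "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" come from oslo.concurrency's lockutils, which serializes work on the cached image. A hedged sketch of that usage follows; the lock name is taken from the log, while any extra flags Nova passes at its call sites (for example an external file lock) are not shown here.

    # Illustrative use of oslo.concurrency's lock context manager.
    from oslo_concurrency import lockutils

    with lockutils.lock('[datastore2] devstack-image-cache_base/'
                        'faabbca4-e27b-433a-b93d-f059fd73bc92'):
        # Critical section: only one worker in this process touches the cached
        # image at a time; lockutils logs the Acquiring/Acquired/Releasing
        # DEBUG lines seen throughout this log.
        pass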
[ 645.415519] env[63538]: DEBUG nova.compute.manager [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 645.416356] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3f1e0c-2a14-4c60-9ac5-64c1e665bf4b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.481026] env[63538]: DEBUG nova.scheduler.client.report [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 645.641058] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100520, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.245986} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.641357] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 645.642162] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17bd42a-337d-4ee6-bcc1-37b4eaa108c7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.671310] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] c065263a-fd40-4b44-a68e-0e03248d0bc0/c065263a-fd40-4b44-a68e-0e03248d0bc0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 645.671651] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2ab0d07-2b98-4215-b9e4-9324cf74788e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.696149] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 645.696149] env[63538]: value = "task-5100521" [ 645.696149] env[63538]: _type = "Task" [ 645.696149] env[63538]: } to 
complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.706887] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100521, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.753071] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.753452] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.753691] env[63538]: DEBUG nova.network.neutron [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 645.753784] env[63538]: DEBUG nova.objects.instance [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lazy-loading 'info_cache' on Instance uuid 2e1b0bc7-3909-48e2-b9be-26822a57ee67 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 645.790633] env[63538]: DEBUG nova.compute.manager [req-147365b8-482e-4da2-be1e-0d063e76d1ec req-2fe3ac49-aa0a-4493-92ff-c1d6c052c67c service nova] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Received event network-vif-plugged-204795e5-710d-4501-95e6-1353e467aded {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 645.790908] env[63538]: DEBUG oslo_concurrency.lockutils [req-147365b8-482e-4da2-be1e-0d063e76d1ec req-2fe3ac49-aa0a-4493-92ff-c1d6c052c67c service nova] Acquiring lock "e32789d5-59ba-4657-9a9c-84fc9bd6cfdf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.791105] env[63538]: DEBUG oslo_concurrency.lockutils [req-147365b8-482e-4da2-be1e-0d063e76d1ec req-2fe3ac49-aa0a-4493-92ff-c1d6c052c67c service nova] Lock "e32789d5-59ba-4657-9a9c-84fc9bd6cfdf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.793592] env[63538]: DEBUG oslo_concurrency.lockutils [req-147365b8-482e-4da2-be1e-0d063e76d1ec req-2fe3ac49-aa0a-4493-92ff-c1d6c052c67c service nova] Lock "e32789d5-59ba-4657-9a9c-84fc9bd6cfdf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.793592] env[63538]: DEBUG 
nova.compute.manager [req-147365b8-482e-4da2-be1e-0d063e76d1ec req-2fe3ac49-aa0a-4493-92ff-c1d6c052c67c service nova] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] No waiting events found dispatching network-vif-plugged-204795e5-710d-4501-95e6-1353e467aded {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 645.793592] env[63538]: WARNING nova.compute.manager [req-147365b8-482e-4da2-be1e-0d063e76d1ec req-2fe3ac49-aa0a-4493-92ff-c1d6c052c67c service nova] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Received unexpected event network-vif-plugged-204795e5-710d-4501-95e6-1353e467aded for instance with vm_state building and task_state spawning. [ 645.905691] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "refresh_cache-e32789d5-59ba-4657-9a9c-84fc9bd6cfdf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.906080] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquired lock "refresh_cache-e32789d5-59ba-4657-9a9c-84fc9bd6cfdf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.906307] env[63538]: DEBUG nova.network.neutron [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 645.942545] env[63538]: INFO nova.compute.manager [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Took 38.87 seconds to build instance. [ 645.984234] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.866s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.984686] env[63538]: DEBUG nova.compute.manager [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 645.987472] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.447s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.987707] env[63538]: DEBUG nova.objects.instance [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Lazy-loading 'resources' on Instance uuid 174368d1-9910-495b-a923-842e0440fd01 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 646.212897] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100521, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.445107] env[63538]: DEBUG oslo_concurrency.lockutils [None req-37f090b3-5011-4f44-9299-3009ce841e4f tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "bd222761-92aa-4f2c-a752-ead9c498ee7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.388s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.491584] env[63538]: DEBUG nova.compute.utils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 646.496291] env[63538]: DEBUG nova.compute.manager [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 646.499700] env[63538]: DEBUG nova.network.neutron [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 646.519597] env[63538]: DEBUG nova.network.neutron [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 646.602782] env[63538]: DEBUG nova.policy [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16065b424d6244d3bb8d28d1f2aa2a60', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ab51bcca7dc40688572337d893c1b4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 646.717153] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100521, 'name': ReconfigVM_Task, 'duration_secs': 0.743941} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.723049] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Reconfigured VM instance instance-00000016 to attach disk [datastore2] c065263a-fd40-4b44-a68e-0e03248d0bc0/c065263a-fd40-4b44-a68e-0e03248d0bc0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 646.724692] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c8d0d20-4d47-4ffe-8dcd-169a8d2b7e10 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.732273] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 646.732273] env[63538]: value = "task-5100522" [ 646.732273] env[63538]: _type = "Task" [ 646.732273] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.746066] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100522, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.949179] env[63538]: DEBUG nova.compute.manager [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 646.996463] env[63538]: DEBUG nova.compute.manager [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 647.151635] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Acquiring lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.151635] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.200970] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9f4b7a-f8ee-4d29-b166-0730028f87e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.211337] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a1bfb1-713c-4057-be84-333d99ce6f5b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.000449] env[63538]: DEBUG nova.compute.utils [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 648.005389] env[63538]: DEBUG nova.network.neutron [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Successfully created port: 4c120970-8c28-4096-892b-31c6a3b13bbe {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 648.011073] env[63538]: DEBUG nova.network.neutron [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Updating instance_info_cache with network_info: [{"id": "204795e5-710d-4501-95e6-1353e467aded", "address": "fa:16:3e:ae:b1:4f", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.108", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap204795e5-71", "ovs_interfaceid": "204795e5-710d-4501-95e6-1353e467aded", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.017247] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b5a75b-a1d7-4ee6-a976-36d76dfedf3c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.031377] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7cf388-edc3-4a46-94c7-5901712fe13e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.037936] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100522, 'name': Rename_Task, 'duration_secs': 0.267234} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.037936] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 648.038915] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8f451e5-3203-4676-8728-463adf913ec6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.051332] env[63538]: DEBUG nova.compute.provider_tree [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.055370] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.055484] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 648.055484] env[63538]: value = "task-5100523" [ 648.055484] env[63538]: _type = "Task" [ 648.055484] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.069234] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100523, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.213867] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "e50e95c0-830b-4d71-999b-546b138bf8f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.214271] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "e50e95c0-830b-4d71-999b-546b138bf8f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.216358] env[63538]: DEBUG nova.network.neutron [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance_info_cache with network_info: [{"id": "47d19b83-6292-46e2-835f-1198ef52374c", "address": "fa:16:3e:af:6f:01", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d19b83-62", "ovs_interfaceid": "47d19b83-6292-46e2-835f-1198ef52374c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.511826] env[63538]: DEBUG nova.compute.manager [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 648.517994] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.366s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.520798] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Releasing lock "refresh_cache-e32789d5-59ba-4657-9a9c-84fc9bd6cfdf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.520798] env[63538]: DEBUG nova.compute.manager [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Instance network_info: |[{"id": "204795e5-710d-4501-95e6-1353e467aded", "address": "fa:16:3e:ae:b1:4f", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.108", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap204795e5-71", "ovs_interfaceid": "204795e5-710d-4501-95e6-1353e467aded", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 648.521466] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:b1:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '204795e5-710d-4501-95e6-1353e467aded', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 648.530443] env[63538]: DEBUG oslo.service.loopingcall [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 648.530443] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 648.530890] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2640bc2-49c7-4dd6-bca5-f6d28aaa362f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.556871] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 648.556871] env[63538]: value = "task-5100524" [ 648.556871] env[63538]: _type = "Task" [ 648.556871] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.561664] env[63538]: DEBUG nova.virt.hardware [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 648.561664] env[63538]: DEBUG nova.virt.hardware [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 648.562962] env[63538]: DEBUG nova.virt.hardware [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 648.562962] env[63538]: DEBUG nova.virt.hardware [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 648.563224] env[63538]: DEBUG nova.virt.hardware [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 648.563224] env[63538]: DEBUG nova.virt.hardware [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 648.563440] env[63538]: DEBUG nova.virt.hardware [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 648.563614] env[63538]: DEBUG nova.virt.hardware [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 648.564031] env[63538]: DEBUG nova.virt.hardware [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 648.564128] env[63538]: DEBUG nova.virt.hardware [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 648.564259] env[63538]: DEBUG nova.virt.hardware [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 648.565277] env[63538]: DEBUG nova.scheduler.client.report [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 648.577024] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f90f51-5ac6-41bb-b057-4a15934d076a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.592165] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100524, 'name': CreateVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.598474] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100523, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.601253] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040a2cac-4f79-404a-91bb-7e2bf7dd9a54 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.722198] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.723348] env[63538]: DEBUG nova.objects.instance [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lazy-loading 'migration_context' on Instance uuid 2e1b0bc7-3909-48e2-b9be-26822a57ee67 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 649.070095] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100524, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.076910] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.089s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.079560] env[63538]: DEBUG oslo_concurrency.lockutils [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.315s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.081823] env[63538]: INFO nova.compute.claims [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 649.098884] env[63538]: DEBUG oslo_vmware.api [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100523, 'name': PowerOnVM_Task, 'duration_secs': 0.724309} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.099269] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 649.099409] env[63538]: INFO nova.compute.manager [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Took 11.54 seconds to spawn the instance on the hypervisor. [ 649.099611] env[63538]: DEBUG nova.compute.manager [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 649.104560] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff127bb-3927-42e7-83af-57d4782e27f8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.135476] env[63538]: INFO nova.scheduler.client.report [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Deleted allocations for instance 174368d1-9910-495b-a923-842e0440fd01 [ 649.228864] env[63538]: DEBUG nova.objects.base [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Object Instance<2e1b0bc7-3909-48e2-b9be-26822a57ee67> lazy-loaded attributes: info_cache,migration_context {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 649.230241] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aeda8ba-93b1-4cbe-b761-252c125cb10f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.259364] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcd821c1-90dd-437f-be2c-d567a4ce7200 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.269119] env[63538]: DEBUG oslo_vmware.api [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 649.269119] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525c3af3-2078-361a-fc63-5d7a7b92b8b4" [ 649.269119] env[63538]: _type = "Task" [ 649.269119] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.279364] env[63538]: DEBUG oslo_vmware.api [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525c3af3-2078-361a-fc63-5d7a7b92b8b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.333342] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Acquiring lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.334878] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.002s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.335356] env[63538]: INFO nova.compute.manager [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Attaching volume dc9f1261-2253-4cc1-91b1-e12aa81cc11d to /dev/sdb [ 649.441955] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29aa145-8cc2-4f0c-b8eb-1517840cf64f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.456866] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deca3185-ad33-44ba-a1a1-85b08e001255 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.477485] env[63538]: DEBUG nova.virt.block_device [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Updating existing volume attachment record: e4832fe8-ae03-4a6b-925b-8e5ca84e462d {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 649.568150] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100524, 'name': CreateVM_Task, 'duration_secs': 0.540069} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.568955] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 649.569190] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.569392] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.569750] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 649.570678] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca258483-fe8b-4342-b9f0-94fadf0f3b8d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.578414] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 649.578414] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bb5937-beba-356c-559d-168f21439ca4" [ 649.578414] env[63538]: _type = "Task" [ 649.578414] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.590008] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bb5937-beba-356c-559d-168f21439ca4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.629309] env[63538]: INFO nova.compute.manager [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Took 42.00 seconds to build instance. 
[ 649.647388] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a860b46-055b-4a10-acbd-3cb45e602f29 tempest-FloatingIPsAssociationNegativeTestJSON-316205081 tempest-FloatingIPsAssociationNegativeTestJSON-316205081-project-member] Lock "174368d1-9910-495b-a923-842e0440fd01" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.050s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.683842] env[63538]: DEBUG nova.compute.manager [req-46ffc62e-0a19-4b9c-b106-1ddeea620fe0 req-8e871ab5-20f5-4f2b-b210-42ce3adfca08 service nova] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Received event network-changed-204795e5-710d-4501-95e6-1353e467aded {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 649.684056] env[63538]: DEBUG nova.compute.manager [req-46ffc62e-0a19-4b9c-b106-1ddeea620fe0 req-8e871ab5-20f5-4f2b-b210-42ce3adfca08 service nova] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Refreshing instance network info cache due to event network-changed-204795e5-710d-4501-95e6-1353e467aded. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 649.684276] env[63538]: DEBUG oslo_concurrency.lockutils [req-46ffc62e-0a19-4b9c-b106-1ddeea620fe0 req-8e871ab5-20f5-4f2b-b210-42ce3adfca08 service nova] Acquiring lock "refresh_cache-e32789d5-59ba-4657-9a9c-84fc9bd6cfdf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.684420] env[63538]: DEBUG oslo_concurrency.lockutils [req-46ffc62e-0a19-4b9c-b106-1ddeea620fe0 req-8e871ab5-20f5-4f2b-b210-42ce3adfca08 service nova] Acquired lock "refresh_cache-e32789d5-59ba-4657-9a9c-84fc9bd6cfdf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.684582] env[63538]: DEBUG nova.network.neutron [req-46ffc62e-0a19-4b9c-b106-1ddeea620fe0 req-8e871ab5-20f5-4f2b-b210-42ce3adfca08 service nova] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Refreshing network info cache for port 204795e5-710d-4501-95e6-1353e467aded {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 649.787314] env[63538]: DEBUG oslo_vmware.api [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525c3af3-2078-361a-fc63-5d7a7b92b8b4, 'name': SearchDatastore_Task, 'duration_secs': 0.014131} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.787314] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.093484] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.094229] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "04dc612b-7987-405b-9716-95c4ff3535ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.094440] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "04dc612b-7987-405b-9716-95c4ff3535ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.094650] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bb5937-beba-356c-559d-168f21439ca4, 'name': SearchDatastore_Task, 'duration_secs': 0.033959} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.094843] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.096815] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.097084] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 650.097313] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.097455] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.097629] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 650.101308] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30719c35-c1b1-4914-8b60-53ecfe861e1d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.113194] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 650.114460] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 650.117236] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d68aa02b-0bec-46cb-b69e-ac496e8eace7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.126309] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 650.126309] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52daee0e-1068-358d-8eee-36e7ee2b6a81" [ 650.126309] env[63538]: _type = "Task" [ 650.126309] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.138510] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1510ea4c-eec5-4638-8b78-e5125553ec3b tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "c065263a-fd40-4b44-a68e-0e03248d0bc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.520s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.139599] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52daee0e-1068-358d-8eee-36e7ee2b6a81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.611400] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.611670] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Starting heal instance info cache {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 650.611987] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Rebuilding the list of instances to heal {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10021}} [ 650.643867] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52daee0e-1068-358d-8eee-36e7ee2b6a81, 'name': SearchDatastore_Task, 'duration_secs': 0.018271} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.645874] env[63538]: DEBUG nova.compute.manager [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 650.647813] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0b96436-37ea-4e67-b810-8ba3682cf75c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.657851] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 650.657851] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5292c893-a60d-f6fc-e1cc-5dc6c157ef1f" [ 650.657851] env[63538]: _type = "Task" [ 650.657851] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.673665] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5292c893-a60d-f6fc-e1cc-5dc6c157ef1f, 'name': SearchDatastore_Task, 'duration_secs': 0.011911} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.676775] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.677610] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] e32789d5-59ba-4657-9a9c-84fc9bd6cfdf/e32789d5-59ba-4657-9a9c-84fc9bd6cfdf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 650.680955] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-499f396d-ce2e-4692-a9c8-07bd66804abd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.692080] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 650.692080] env[63538]: value = "task-5100528" [ 650.692080] env[63538]: _type = "Task" [ 650.692080] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.709445] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100528, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.794887] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a3df6b-afe8-4c8c-867a-2c71ae6f3dd6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.806447] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe64fc61-e0c6-4f99-9a1e-e70681385ab0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.852094] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549fc640-6c73-4045-a983-905043f1f0b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.864494] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016f6061-5e7c-4f96-a9fd-cb021a90b3b7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.887958] env[63538]: DEBUG nova.compute.provider_tree [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.122834] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Skipping network cache update for instance because it is Building. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10030}} [ 651.123703] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Skipping network cache update for instance because it is Building. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10030}} [ 651.123703] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Skipping network cache update for instance because it is Building. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10030}} [ 651.148071] env[63538]: DEBUG nova.network.neutron [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Successfully updated port: 4c120970-8c28-4096-892b-31c6a3b13bbe {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 651.159604] env[63538]: DEBUG nova.network.neutron [req-46ffc62e-0a19-4b9c-b106-1ddeea620fe0 req-8e871ab5-20f5-4f2b-b210-42ce3adfca08 service nova] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Updated VIF entry in instance network info cache for port 204795e5-710d-4501-95e6-1353e467aded. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 651.160070] env[63538]: DEBUG nova.network.neutron [req-46ffc62e-0a19-4b9c-b106-1ddeea620fe0 req-8e871ab5-20f5-4f2b-b210-42ce3adfca08 service nova] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Updating instance_info_cache with network_info: [{"id": "204795e5-710d-4501-95e6-1353e467aded", "address": "fa:16:3e:ae:b1:4f", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.108", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap204795e5-71", "ovs_interfaceid": "204795e5-710d-4501-95e6-1353e467aded", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.193373] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.213620] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100528, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489056} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.213620] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] e32789d5-59ba-4657-9a9c-84fc9bd6cfdf/e32789d5-59ba-4657-9a9c-84fc9bd6cfdf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 651.213620] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 651.213620] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-89ae191d-7982-4ea6-b335-d26174cdcd0e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.219789] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 651.219789] env[63538]: value = "task-5100529" [ 651.219789] env[63538]: _type = "Task" [ 651.219789] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.231031] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100529, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.349571] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "refresh_cache-c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.349764] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquired lock "refresh_cache-c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.350127] env[63538]: DEBUG nova.network.neutron [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Forcefully refreshing network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 651.350127] env[63538]: DEBUG nova.objects.instance [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lazy-loading 'info_cache' on Instance uuid c8a02fa6-5232-4dde-b6dd-0da1089b6bbf {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 651.390996] env[63538]: DEBUG nova.scheduler.client.report [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 651.651117] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquiring lock "refresh_cache-a6bb8713-6b00-4a43-96b7-a84ee39d790d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.652195] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquired lock "refresh_cache-a6bb8713-6b00-4a43-96b7-a84ee39d790d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.652195] env[63538]: DEBUG nova.network.neutron [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 651.669934] env[63538]: DEBUG oslo_concurrency.lockutils [req-46ffc62e-0a19-4b9c-b106-1ddeea620fe0 req-8e871ab5-20f5-4f2b-b210-42ce3adfca08 service nova] Releasing lock "refresh_cache-e32789d5-59ba-4657-9a9c-84fc9bd6cfdf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.733605] env[63538]: DEBUG oslo_vmware.api [None 
req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100529, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077216} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.734323] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 651.734633] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2c301d-21d5-4b9a-8db5-7017da17e6ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.755615] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.755899] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.756121] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "1e33b68e-8509-4ec4-8ec4-dc758aae9a5a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.756693] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "1e33b68e-8509-4ec4-8ec4-dc758aae9a5a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.756693] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "1e33b68e-8509-4ec4-8ec4-dc758aae9a5a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.770310] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 
tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] e32789d5-59ba-4657-9a9c-84fc9bd6cfdf/e32789d5-59ba-4657-9a9c-84fc9bd6cfdf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 651.771817] env[63538]: INFO nova.compute.manager [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Terminating instance [ 651.773865] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-361a538a-d250-440d-aba5-d5834cc30d16 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.792496] env[63538]: DEBUG nova.compute.manager [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 651.792496] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 651.794018] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40348249-9741-4121-9b74-7f7e3bd0d8e9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.805354] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 651.807947] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b033736c-18bf-42a6-b3ad-3e468d10836e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.809448] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 651.809448] env[63538]: value = "task-5100530" [ 651.809448] env[63538]: _type = "Task" [ 651.809448] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.817208] env[63538]: DEBUG oslo_vmware.api [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 651.817208] env[63538]: value = "task-5100531" [ 651.817208] env[63538]: _type = "Task" [ 651.817208] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.826709] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100530, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.834483] env[63538]: DEBUG oslo_vmware.api [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100531, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.878472] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "db5993ce-6982-4b82-8f5d-3fe51df8896b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.878721] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "db5993ce-6982-4b82-8f5d-3fe51df8896b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.897681] env[63538]: DEBUG oslo_concurrency.lockutils [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.818s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.900350] env[63538]: DEBUG nova.compute.manager [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 651.903205] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.038s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.904906] env[63538]: INFO nova.compute.claims [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 652.213393] env[63538]: DEBUG nova.network.neutron [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 652.327768] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100530, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.335325] env[63538]: DEBUG oslo_vmware.api [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100531, 'name': PowerOffVM_Task, 'duration_secs': 0.267527} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.336706] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 652.337102] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 652.337569] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92c79819-f01c-4568-9e06-83e8afad0b3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.409677] env[63538]: DEBUG nova.compute.utils [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 652.415858] env[63538]: DEBUG nova.compute.manager [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 652.418031] env[63538]: DEBUG nova.network.neutron [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 652.425025] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 652.425025] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 652.425025] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Deleting the datastore file [datastore1] 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 652.425025] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da3f2ef2-fe36-447f-8bc0-b0ff5038c81f {{(pid=63538) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.434739] env[63538]: DEBUG oslo_vmware.api [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 652.434739] env[63538]: value = "task-5100534" [ 652.434739] env[63538]: _type = "Task" [ 652.434739] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.445514] env[63538]: DEBUG oslo_vmware.api [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100534, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.563318] env[63538]: DEBUG nova.policy [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fbe1f9edd6f4de49854f2f858314e8f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ae58d691f8e4b10a978587c2b8863e1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 652.609188] env[63538]: DEBUG nova.network.neutron [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Updating instance_info_cache with network_info: [{"id": "4c120970-8c28-4096-892b-31c6a3b13bbe", "address": "fa:16:3e:e7:f8:bb", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.54", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c120970-8c", "ovs_interfaceid": "4c120970-8c28-4096-892b-31c6a3b13bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.824466] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100530, 'name': ReconfigVM_Task, 'duration_secs': 0.588479} completed 
successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.824770] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Reconfigured VM instance instance-00000017 to attach disk [datastore1] e32789d5-59ba-4657-9a9c-84fc9bd6cfdf/e32789d5-59ba-4657-9a9c-84fc9bd6cfdf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 652.825440] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1bc082e-9b89-44b5-bad0-62dd1d2d654c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.830129] env[63538]: DEBUG nova.compute.manager [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Received event network-vif-plugged-4c120970-8c28-4096-892b-31c6a3b13bbe {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 652.830129] env[63538]: DEBUG oslo_concurrency.lockutils [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] Acquiring lock "a6bb8713-6b00-4a43-96b7-a84ee39d790d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.830129] env[63538]: DEBUG oslo_concurrency.lockutils [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] Lock "a6bb8713-6b00-4a43-96b7-a84ee39d790d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.830129] env[63538]: DEBUG oslo_concurrency.lockutils [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] Lock "a6bb8713-6b00-4a43-96b7-a84ee39d790d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.830129] env[63538]: DEBUG nova.compute.manager [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] No waiting events found dispatching network-vif-plugged-4c120970-8c28-4096-892b-31c6a3b13bbe {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 652.830363] env[63538]: WARNING nova.compute.manager [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Received unexpected event network-vif-plugged-4c120970-8c28-4096-892b-31c6a3b13bbe for instance with vm_state building and task_state spawning. 
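The repeated "Waiting for the task: … progress is N% … completed successfully" records above are produced by oslo.vmware's task-polling loop (wait_for_task/_poll_task in oslo_vmware/api.py). As a minimal, hedged sketch of how a caller typically drives that loop — the vCenter host, credentials and the vm_ref placeholder below are assumptions for illustration, not values taken from this log:

    # Hedged sketch, not an excerpt from Nova: shows the oslo.vmware
    # session + task-wait pattern reflected in the records above.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org',          # assumed vCenter host
        'administrator@vsphere.local',  # assumed username
        'secret',                       # assumed password
        api_retry_count=10,
        task_poll_interval=0.5)

    vm_ref = ...  # a VirtualMachine managed-object reference obtained elsewhere

    # invoke_api() issues the SOAP call and returns a Task reference;
    # wait_for_task() polls it, logging progress until success or error,
    # which is what _poll_task() reports in the log lines above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)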
[ 652.830363] env[63538]: DEBUG nova.compute.manager [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Received event network-changed-4c120970-8c28-4096-892b-31c6a3b13bbe {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 652.830363] env[63538]: DEBUG nova.compute.manager [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Refreshing instance network info cache due to event network-changed-4c120970-8c28-4096-892b-31c6a3b13bbe. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 652.830507] env[63538]: DEBUG oslo_concurrency.lockutils [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] Acquiring lock "refresh_cache-a6bb8713-6b00-4a43-96b7-a84ee39d790d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.841032] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 652.841032] env[63538]: value = "task-5100535" [ 652.841032] env[63538]: _type = "Task" [ 652.841032] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.853824] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100535, 'name': Rename_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.917780] env[63538]: DEBUG nova.compute.manager [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 652.950305] env[63538]: DEBUG oslo_vmware.api [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100534, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.269142} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.950567] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 652.950768] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 652.950965] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 652.951149] env[63538]: INFO nova.compute.manager [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Took 1.16 seconds to destroy the instance on the hypervisor. [ 652.951402] env[63538]: DEBUG oslo.service.loopingcall [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.953043] env[63538]: DEBUG nova.compute.manager [-] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 652.953043] env[63538]: DEBUG nova.network.neutron [-] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 653.111251] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Releasing lock "refresh_cache-a6bb8713-6b00-4a43-96b7-a84ee39d790d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.111612] env[63538]: DEBUG nova.compute.manager [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Instance network_info: |[{"id": "4c120970-8c28-4096-892b-31c6a3b13bbe", "address": "fa:16:3e:e7:f8:bb", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.54", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c120970-8c", "ovs_interfaceid": "4c120970-8c28-4096-892b-31c6a3b13bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 653.112156] env[63538]: DEBUG oslo_concurrency.lockutils [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] Acquired lock "refresh_cache-a6bb8713-6b00-4a43-96b7-a84ee39d790d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.112348] env[63538]: DEBUG nova.network.neutron [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Refreshing network info cache for port 4c120970-8c28-4096-892b-31c6a3b13bbe {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 653.113823] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:f8:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c120970-8c28-4096-892b-31c6a3b13bbe', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 653.128212] env[63538]: DEBUG oslo.service.loopingcall [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 653.133935] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 653.135065] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-071f684b-cda3-48c4-a0ac-af0143b45985 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.157688] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "c065263a-fd40-4b44-a68e-0e03248d0bc0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.157861] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "c065263a-fd40-4b44-a68e-0e03248d0bc0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.158081] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "c065263a-fd40-4b44-a68e-0e03248d0bc0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.158384] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "c065263a-fd40-4b44-a68e-0e03248d0bc0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.158601] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "c065263a-fd40-4b44-a68e-0e03248d0bc0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.161097] env[63538]: INFO nova.compute.manager [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 
tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Terminating instance [ 653.163821] env[63538]: DEBUG nova.compute.manager [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 653.163821] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 653.164722] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024a74bc-5e53-4717-913d-c3055ef4dd6d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.170081] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 653.170081] env[63538]: value = "task-5100536" [ 653.170081] env[63538]: _type = "Task" [ 653.170081] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.177680] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 653.181743] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-608b55c7-ba40-42a1-a247-6a7f3cd3f47b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.193039] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100536, 'name': CreateVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.201035] env[63538]: DEBUG oslo_vmware.api [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 653.201035] env[63538]: value = "task-5100537" [ 653.201035] env[63538]: _type = "Task" [ 653.201035] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.212231] env[63538]: DEBUG oslo_vmware.api [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100537, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.352595] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100535, 'name': Rename_Task, 'duration_secs': 0.156333} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.353090] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 653.355402] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d7cc980-37f5-447a-9ec6-9ae8a3477923 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.365513] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 653.365513] env[63538]: value = "task-5100538" [ 653.365513] env[63538]: _type = "Task" [ 653.365513] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.375738] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100538, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.421282] env[63538]: INFO nova.virt.block_device [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Booting with volume 9cb99482-8e0b-49d1-9249-8f411b389c09 at /dev/sda [ 653.484682] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c638b71-bf91-4427-9459-d21ad38efe8a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.505482] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45142667-79a8-4ddb-83c6-c66eb583c6c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.564617] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-686f7428-429c-46ca-962c-ef4655941270 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.577037] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b03401-bd23-47ef-a3c4-e0f62b6cfd2e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.602074] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.602740] env[63538]: DEBUG oslo_concurrency.lockutils [None 
req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.636737] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90dd5f44-6eb4-4b7c-8daa-43081273acbc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.648305] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa752e0-052d-40c4-954a-2db129c57083 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.671331] env[63538]: DEBUG nova.virt.block_device [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Updating existing volume attachment record: 1bb2a248-daa4-46c1-94ab-dbf0f6147bc7 {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 653.690837] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100536, 'name': CreateVM_Task, 'duration_secs': 0.464} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.691095] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 653.692808] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.692996] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.693531] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 653.697023] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0417edfa-b8cd-45c1-9c98-9b73ae1c637d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.709763] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 
tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 653.709763] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c49217-80db-0db0-2e00-08f6f5805437" [ 653.709763] env[63538]: _type = "Task" [ 653.709763] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.717883] env[63538]: DEBUG oslo_vmware.api [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100537, 'name': PowerOffVM_Task, 'duration_secs': 0.290718} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.719988] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 653.719988] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 653.719988] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9dad6b20-f6f6-441b-8543-5cc49b18a048 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.726083] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c49217-80db-0db0-2e00-08f6f5805437, 'name': SearchDatastore_Task, 'duration_secs': 0.011527} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.726639] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.727497] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 653.727497] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.727497] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.727645] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.728239] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8282cf65-0a7e-4e5a-8b82-4e28e9f9bd51 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.743195] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.743826] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 653.745137] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45c3d7c5-8e08-44eb-a752-f03da8d7f26d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.757057] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96adda7-0bf6-4850-b67b-8164061bc807 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.764882] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 653.764882] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d24755-17c4-5570-7b26-66d5b8a55d9a" [ 653.764882] env[63538]: _type = "Task" [ 653.764882] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.774804] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4104861-31c5-4338-a394-448ee4c65c97 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.781058] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d24755-17c4-5570-7b26-66d5b8a55d9a, 'name': SearchDatastore_Task, 'duration_secs': 0.013298} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.782636] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8c622d6-ff9b-44d5-bdf9-8341d130556d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.819320] env[63538]: DEBUG nova.network.neutron [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Successfully created port: 845bbf0e-88f5-474e-b875-0a12bfaebd27 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 653.821769] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13e5045-d3fa-42f3-ab18-c37c1ef4b0ac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.824610] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 653.824817] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 653.824991] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Deleting the datastore file [datastore2] c065263a-fd40-4b44-a68e-0e03248d0bc0 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 653.829444] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f5fd2e6-bd20-443e-a5df-f54ce4ce2cba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.832818] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 653.832818] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52eb256a-3d84-7f9a-5f19-b10f445b3780" [ 653.832818] env[63538]: _type = "Task" [ 653.832818] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.841269] env[63538]: DEBUG oslo_vmware.api [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 653.841269] env[63538]: value = "task-5100540" [ 653.841269] env[63538]: _type = "Task" [ 653.841269] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.842908] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f028e4c9-bace-4188-a84c-3d528087ac2c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.854991] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52eb256a-3d84-7f9a-5f19-b10f445b3780, 'name': SearchDatastore_Task, 'duration_secs': 0.00989} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.856328] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.856328] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] a6bb8713-6b00-4a43-96b7-a84ee39d790d/a6bb8713-6b00-4a43-96b7-a84ee39d790d.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 653.856328] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66fce65a-096f-4d48-8bbf-9ce278409879 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.871193] env[63538]: DEBUG nova.compute.provider_tree [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.879866] env[63538]: DEBUG oslo_vmware.api [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100540, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.889237] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 653.889237] env[63538]: value = "task-5100541" [ 653.889237] env[63538]: _type = "Task" [ 653.889237] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.899992] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100538, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.907733] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100541, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.981935] env[63538]: DEBUG nova.network.neutron [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Updating instance_info_cache with network_info: [{"id": "8d80ee33-5e67-4651-a9b1-1f58ca92fb2e", "address": "fa:16:3e:e1:60:79", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d80ee33-5e", "ovs_interfaceid": "8d80ee33-5e67-4651-a9b1-1f58ca92fb2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.023236] env[63538]: DEBUG nova.network.neutron [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Updated VIF entry in instance network info cache for port 4c120970-8c28-4096-892b-31c6a3b13bbe. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 654.023651] env[63538]: DEBUG nova.network.neutron [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Updating instance_info_cache with network_info: [{"id": "4c120970-8c28-4096-892b-31c6a3b13bbe", "address": "fa:16:3e:e7:f8:bb", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.54", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c120970-8c", "ovs_interfaceid": "4c120970-8c28-4096-892b-31c6a3b13bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.065768] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Volume attach. 
Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 654.066147] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992302', 'volume_id': 'dc9f1261-2253-4cc1-91b1-e12aa81cc11d', 'name': 'volume-dc9f1261-2253-4cc1-91b1-e12aa81cc11d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bf54098e-91a8-403f-a6fe-b58a62daaadb', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc9f1261-2253-4cc1-91b1-e12aa81cc11d', 'serial': 'dc9f1261-2253-4cc1-91b1-e12aa81cc11d'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 654.069211] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e8d2ea-428d-4108-b63f-a93d6b6be962 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.087477] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0c955c-5a08-40d7-9f7c-94ad8174f01e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.122027] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] volume-dc9f1261-2253-4cc1-91b1-e12aa81cc11d/volume-dc9f1261-2253-4cc1-91b1-e12aa81cc11d.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 654.122027] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a58aeaf-8b59-46b3-9a2a-a2e06b519c0c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.143541] env[63538]: DEBUG oslo_vmware.api [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Waiting for the task: (returnval){ [ 654.143541] env[63538]: value = "task-5100542" [ 654.143541] env[63538]: _type = "Task" [ 654.143541] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.154386] env[63538]: DEBUG oslo_vmware.api [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Task: {'id': task-5100542, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.356929] env[63538]: DEBUG oslo_vmware.api [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100540, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174473} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.357290] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 654.357451] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 654.357659] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 654.357954] env[63538]: INFO nova.compute.manager [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Took 1.19 seconds to destroy the instance on the hypervisor. [ 654.358172] env[63538]: DEBUG oslo.service.loopingcall [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 654.358446] env[63538]: DEBUG nova.compute.manager [-] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 654.358556] env[63538]: DEBUG nova.network.neutron [-] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 654.377846] env[63538]: DEBUG nova.scheduler.client.report [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 654.385612] env[63538]: DEBUG oslo_vmware.api [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100538, 'name': PowerOnVM_Task, 'duration_secs': 0.609212} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.386309] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 654.386446] env[63538]: INFO nova.compute.manager [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Took 11.46 seconds to spawn the instance on the hypervisor. [ 654.386649] env[63538]: DEBUG nova.compute.manager [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 654.387564] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb91247a-46b3-4ca1-ad25-cb097ac0dca8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.416741] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100541, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.486383] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Releasing lock "refresh_cache-c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.487578] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Updated the network info_cache for instance {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10088}} [ 654.487832] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.488128] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.488378] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.488546] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63538) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.488793] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.489530] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.489530] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63538) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10636}} [ 654.489530] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.527652] env[63538]: DEBUG oslo_concurrency.lockutils [req-e4020051-a523-4e84-ac6b-376be0797739 req-70396300-0476-460d-ac62-76e83641661e service nova] Releasing lock "refresh_cache-a6bb8713-6b00-4a43-96b7-a84ee39d790d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.662967] env[63538]: DEBUG oslo_vmware.api [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Task: {'id': task-5100542, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.856675] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "707a79e2-f5db-479c-b719-1e040935cda3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.857093] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "707a79e2-f5db-479c-b719-1e040935cda3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.890190] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.985s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.890190] env[63538]: DEBUG nova.compute.manager [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 654.891941] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.200s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.898655] env[63538]: INFO nova.compute.claims [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 654.912631] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54457} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.913100] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] a6bb8713-6b00-4a43-96b7-a84ee39d790d/a6bb8713-6b00-4a43-96b7-a84ee39d790d.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 654.913250] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 654.913401] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7985b048-5074-4007-b53c-5c589bd4700e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.928346] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 654.928346] env[63538]: value = "task-5100543" [ 654.928346] env[63538]: _type = "Task" [ 654.928346] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.934300] env[63538]: INFO nova.compute.manager [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Took 43.11 seconds to build instance. [ 654.945039] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100543, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.995705] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.159841] env[63538]: DEBUG oslo_vmware.api [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Task: {'id': task-5100542, 'name': ReconfigVM_Task, 'duration_secs': 0.59378} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.164297] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Reconfigured VM instance instance-0000000e to attach disk [datastore1] volume-dc9f1261-2253-4cc1-91b1-e12aa81cc11d/volume-dc9f1261-2253-4cc1-91b1-e12aa81cc11d.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 655.170728] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63bb6c98-4693-4dbd-9e96-12909021fa25 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.195916] env[63538]: DEBUG oslo_vmware.api [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Waiting for the task: (returnval){ [ 655.195916] env[63538]: value = "task-5100544" [ 655.195916] env[63538]: _type = "Task" [ 655.195916] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.212553] env[63538]: DEBUG oslo_vmware.api [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Task: {'id': task-5100544, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.298440] env[63538]: DEBUG nova.network.neutron [-] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.404906] env[63538]: DEBUG nova.compute.utils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 655.406520] env[63538]: DEBUG nova.compute.manager [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 655.406710] env[63538]: DEBUG nova.network.neutron [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 655.441172] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0cef4799-7cd7-41e8-bb7c-58e0624ddcc3 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "e32789d5-59ba-4657-9a9c-84fc9bd6cfdf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.628s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.446535] env[63538]: DEBUG nova.compute.manager [req-6ed5bbac-ffda-402e-85cd-64615a702abe req-3d28e050-48d8-4012-ba82-9f0c242b0e92 service nova] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Received event network-vif-deleted-514b9391-5894-4419-800a-e06658f8a44b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 655.451195] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100543, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073223} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.451521] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 655.451993] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Acquiring lock "7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.452380] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Lock "7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.452536] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Acquiring lock "7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.452774] env[63538]: DEBUG oslo_concurrency.lockutils 
[None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Lock "7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.452951] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Lock "7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.455409] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e08869-8683-4a07-a7e8-02b42de39dd6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.462998] env[63538]: INFO nova.compute.manager [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Terminating instance [ 655.466477] env[63538]: DEBUG nova.compute.manager [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 655.466630] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 655.468390] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c52d85-b4f6-4bec-82d3-90348758d51e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.491231] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] a6bb8713-6b00-4a43-96b7-a84ee39d790d/a6bb8713-6b00-4a43-96b7-a84ee39d790d.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 655.492343] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33c95744-00f5-4d25-ae90-fc94b3e7bc39 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.512530] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 
7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 655.512791] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9edbde3-64cb-4d21-89fb-f3090f3789c4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.519216] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 655.519216] env[63538]: value = "task-5100545" [ 655.519216] env[63538]: _type = "Task" [ 655.519216] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.520984] env[63538]: DEBUG oslo_vmware.api [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Waiting for the task: (returnval){ [ 655.520984] env[63538]: value = "task-5100546" [ 655.520984] env[63538]: _type = "Task" [ 655.520984] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.534921] env[63538]: DEBUG oslo_vmware.api [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100546, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.538581] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100545, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.558586] env[63538]: DEBUG nova.policy [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78d5688727174c08a29ea7f3ac35e129', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdfc96ac41be43f9ba0596444eb75737', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 655.610736] env[63538]: DEBUG nova.network.neutron [-] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.709928] env[63538]: DEBUG oslo_vmware.api [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Task: {'id': task-5100544, 'name': ReconfigVM_Task, 'duration_secs': 0.187175} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.710549] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992302', 'volume_id': 'dc9f1261-2253-4cc1-91b1-e12aa81cc11d', 'name': 'volume-dc9f1261-2253-4cc1-91b1-e12aa81cc11d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bf54098e-91a8-403f-a6fe-b58a62daaadb', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc9f1261-2253-4cc1-91b1-e12aa81cc11d', 'serial': 'dc9f1261-2253-4cc1-91b1-e12aa81cc11d'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 655.806032] env[63538]: INFO nova.compute.manager [-] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Took 2.85 seconds to deallocate network for instance. [ 655.864940] env[63538]: DEBUG nova.compute.manager [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 655.865550] env[63538]: DEBUG nova.virt.hardware [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 655.865751] env[63538]: DEBUG nova.virt.hardware [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 655.865932] env[63538]: DEBUG nova.virt.hardware [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.866139] env[63538]: DEBUG nova.virt.hardware [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 655.868267] env[63538]: DEBUG nova.virt.hardware [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Image pref 0:0:0 
{{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.868267] env[63538]: DEBUG nova.virt.hardware [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 655.868267] env[63538]: DEBUG nova.virt.hardware [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 655.868267] env[63538]: DEBUG nova.virt.hardware [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 655.868267] env[63538]: DEBUG nova.virt.hardware [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 655.868637] env[63538]: DEBUG nova.virt.hardware [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 655.868637] env[63538]: DEBUG nova.virt.hardware [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 655.869104] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffacdda-5461-4e3d-95e3-b4b241193078 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.880794] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1670c55b-eeea-45f4-9213-e40499a43e83 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.919608] env[63538]: DEBUG nova.compute.manager [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 655.947648] env[63538]: DEBUG nova.compute.manager [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 656.042111] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100545, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.044893] env[63538]: DEBUG oslo_vmware.api [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100546, 'name': PowerOffVM_Task, 'duration_secs': 0.394217} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.045388] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 656.045388] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 656.045640] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7bf458b-7b55-4a3a-9cb0-54eae53c4e8c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.119156] env[63538]: DEBUG nova.network.neutron [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Successfully updated port: 845bbf0e-88f5-474e-b875-0a12bfaebd27 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 656.120919] env[63538]: INFO nova.compute.manager [-] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Took 1.76 seconds to deallocate network for instance. 
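The update_instance_cache_with_nw_info entries above carry the full network_info payload for an instance as a JSON list of VIF dictionaries (port id, MAC address, subnets with fixed IPs, MTU, OVS details). The following is a minimal standalone Python sketch, assuming such a payload has been copied out of the log for offline inspection; the trimmed sample string and the summarize_vifs helper are illustrative only and are not Nova code.

import json

# A trimmed VIF entry copied from the instance_info_cache lines above
# (most keys dropped for brevity); in practice you would paste the full
# "network_info: [...]" payload captured from the log.
network_info_json = """
[{"id": "4c120970-8c28-4096-892b-31c6a3b13bbe",
  "address": "fa:16:3e:e7:f8:bb",
  "network": {"label": "shared",
              "subnets": [{"cidr": "192.168.233.0/24",
                           "ips": [{"address": "192.168.233.54",
                                    "type": "fixed"}]}],
              "meta": {"mtu": 8950}},
  "type": "ovs",
  "devname": "tap4c120970-8c"}]
"""

def summarize_vifs(payload: str) -> None:
    """Print one line per VIF: device name, MAC, fixed IPs and MTU."""
    for vif in json.loads(payload):
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        mtu = vif["network"]["meta"].get("mtu")
        print(f'{vif["devname"]} ({vif["address"]}): '
              f'ips={ips} mtu={mtu} type={vif["type"]}')

if __name__ == "__main__":
    summarize_vifs(network_info_json)
    # Expected output for the sample above:
    # tap4c120970-8c (fa:16:3e:e7:f8:bb): ips=['192.168.233.54'] mtu=8950 type=ovs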
[ 656.131012] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 656.131012] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 656.131012] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Deleting the datastore file [datastore2] 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 656.131319] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-720f0452-960b-4cc7-9df9-6e238d589ef7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.146259] env[63538]: DEBUG oslo_vmware.api [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Waiting for the task: (returnval){ [ 656.146259] env[63538]: value = "task-5100548" [ 656.146259] env[63538]: _type = "Task" [ 656.146259] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.159577] env[63538]: DEBUG oslo_vmware.api [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100548, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.297838] env[63538]: DEBUG nova.network.neutron [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Successfully created port: dc774ce7-f5aa-452a-828d-e56e0339fe56 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.313685] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.439269] env[63538]: DEBUG nova.compute.manager [req-562f88e0-be7c-46fe-afaf-5c42520c8370 req-dc7b5152-67cf-4b5e-ba5c-80b408058823 service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Received event network-vif-plugged-845bbf0e-88f5-474e-b875-0a12bfaebd27 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 656.439269] env[63538]: DEBUG oslo_concurrency.lockutils [req-562f88e0-be7c-46fe-afaf-5c42520c8370 req-dc7b5152-67cf-4b5e-ba5c-80b408058823 service nova] Acquiring lock "47500aaa-92fc-454c-badd-d6f8a2203083-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.444361] env[63538]: DEBUG oslo_concurrency.lockutils [req-562f88e0-be7c-46fe-afaf-5c42520c8370 req-dc7b5152-67cf-4b5e-ba5c-80b408058823 service nova] Lock "47500aaa-92fc-454c-badd-d6f8a2203083-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.444690] env[63538]: DEBUG oslo_concurrency.lockutils [req-562f88e0-be7c-46fe-afaf-5c42520c8370 req-dc7b5152-67cf-4b5e-ba5c-80b408058823 service nova] Lock "47500aaa-92fc-454c-badd-d6f8a2203083-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.005s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.444794] env[63538]: DEBUG nova.compute.manager [req-562f88e0-be7c-46fe-afaf-5c42520c8370 req-dc7b5152-67cf-4b5e-ba5c-80b408058823 service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] No waiting events found dispatching network-vif-plugged-845bbf0e-88f5-474e-b875-0a12bfaebd27 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 656.445052] env[63538]: WARNING nova.compute.manager [req-562f88e0-be7c-46fe-afaf-5c42520c8370 req-dc7b5152-67cf-4b5e-ba5c-80b408058823 service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Received unexpected event network-vif-plugged-845bbf0e-88f5-474e-b875-0a12bfaebd27 for instance with vm_state building and task_state spawning. 
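The oslo_concurrency.lockutils lines in this trace always come in the same three-step pattern: Acquiring lock "X" by "Y", then Lock "X" acquired by "Y" :: waited N.NNNs, then Lock "X" "released" by "Y" :: held N.NNNs, which makes it possible to read lock contention straight off the log. The snippet below is only an illustrative re-creation of that pattern in plain Python, not oslo.concurrency's implementation; the lock name and caller string in the usage line are hypothetical.

import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}

@contextmanager
def timed_lock(name: str, caller: str):
    """Acquire a named lock and report waited/held times, mimicking the
    'acquired ... waited N.NNNs' / 'released ... held N.NNNs' log lines."""
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - acquired
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

# Hypothetical usage mirroring the "compute_resources" lock seen above:
with timed_lock("compute_resources", "example.instance_claim"):
    time.sleep(0.01)  # stand-in for the resource-claim work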
[ 656.476575] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.532759] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100545, 'name': ReconfigVM_Task, 'duration_secs': 0.609367} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.533140] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Reconfigured VM instance instance-00000018 to attach disk [datastore1] a6bb8713-6b00-4a43-96b7-a84ee39d790d/a6bb8713-6b00-4a43-96b7-a84ee39d790d.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 656.533916] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75163411-d11a-4729-a761-2259565436a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.545057] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 656.545057] env[63538]: value = "task-5100549" [ 656.545057] env[63538]: _type = "Task" [ 656.545057] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.555185] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100549, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.630829] env[63538]: DEBUG oslo_concurrency.lockutils [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Acquiring lock "refresh_cache-47500aaa-92fc-454c-badd-d6f8a2203083" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.630829] env[63538]: DEBUG oslo_concurrency.lockutils [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Acquired lock "refresh_cache-47500aaa-92fc-454c-badd-d6f8a2203083" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.630829] env[63538]: DEBUG nova.network.neutron [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 656.630829] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.660980] env[63538]: DEBUG oslo_vmware.api [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Task: {'id': task-5100548, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285659} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.661693] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 656.661914] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 656.662109] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 656.662314] env[63538]: INFO nova.compute.manager [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Took 1.20 seconds to destroy the instance on the hypervisor. 
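Most of the vCenter operations in this section (ReconfigVM_Task, PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, Rename_Task, PowerOnVM_Task) run as asynchronous tasks: the caller logs "Waiting for the task", then the same task id reappears with a growing progress percentage until it is reported as "completed successfully". As a rough mental model only, and not oslo.vmware's actual wait_for_task/_poll_task code, the loop below sketches that poll-until-done shape; get_task_state is a hypothetical callable standing in for the real status query, and the fake backend in the usage line is invented for the example.

import time
from typing import Callable

def wait_for_task(task_id: str,
                  get_task_state: Callable[[str], dict],
                  poll_interval: float = 0.5,
                  timeout: float = 300.0) -> dict:
    """Poll a long-running task until it succeeds, fails or times out.

    get_task_state(task_id) is assumed to return a dict such as
    {"state": "running", "progress": 42} or {"state": "success"}.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_state(task_id)
        state = info.get("state")
        if state == "success":
            print(f"Task: {task_id} completed successfully.")
            return info
        if state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        print(f"Task: {task_id} progress is {info.get('progress', 0)}%.")
        if time.monotonic() > deadline:
            raise TimeoutError(f"Task {task_id} did not complete in {timeout}s")
        time.sleep(poll_interval)

# Hypothetical usage with a fake backend that finishes on the third poll:
_states = iter([{"state": "running", "progress": 10},
                {"state": "running", "progress": 77},
                {"state": "success"}])
wait_for_task("task-5100542", lambda _tid: next(_states), poll_interval=0.01)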
[ 656.662632] env[63538]: DEBUG oslo.service.loopingcall [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 656.662867] env[63538]: DEBUG nova.compute.manager [-] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 656.662971] env[63538]: DEBUG nova.network.neutron [-] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 656.783764] env[63538]: DEBUG nova.objects.instance [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Lazy-loading 'flavor' on Instance uuid bf54098e-91a8-403f-a6fe-b58a62daaadb {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 656.808174] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf6b277-35aa-4e7a-acd9-8d04bc447cf4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.826046] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb146498-e64c-4cb7-a144-8df4e6b9aa45 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.864697] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712e10d6-d51e-4974-becd-8355adceaeba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.871993] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1b771e-d5e9-4dd2-b282-4d80ce0bf8bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.895673] env[63538]: DEBUG nova.compute.provider_tree [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.946841] env[63538]: DEBUG nova.compute.manager [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 656.983158] env[63538]: DEBUG nova.virt.hardware [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 656.983519] env[63538]: DEBUG nova.virt.hardware [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 656.983698] env[63538]: DEBUG nova.virt.hardware [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 656.983881] env[63538]: DEBUG nova.virt.hardware [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 656.984033] env[63538]: DEBUG nova.virt.hardware [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 656.984187] env[63538]: DEBUG nova.virt.hardware [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 656.984399] env[63538]: DEBUG nova.virt.hardware [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 656.984549] env[63538]: DEBUG nova.virt.hardware [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 656.984711] 
env[63538]: DEBUG nova.virt.hardware [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 656.984861] env[63538]: DEBUG nova.virt.hardware [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 656.985063] env[63538]: DEBUG nova.virt.hardware [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 656.985979] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2431b8aa-a969-40a8-82da-86e81ee33fcd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.998662] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23fef61-9eb1-43a8-921c-a9cc2e86714a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.057302] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100549, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.219403] env[63538]: DEBUG nova.network.neutron [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 657.289983] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30cdfae0-e260-4419-a7f9-28b8ca3aef81 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.955s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.401143] env[63538]: DEBUG nova.scheduler.client.report [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 657.413373] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd68c413-7ca7-4502-9658-547b952a799c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.420802] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-15854ee0-e9ba-4f62-bd72-7f4d6743be9f tempest-ServersAdminNegativeTestJSON-1152758389 tempest-ServersAdminNegativeTestJSON-1152758389-project-admin] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Suspending the VM {{(pid=63538) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 657.420903] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-e67f46ed-5a84-4465-a468-4d30d022ad9a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.429019] env[63538]: DEBUG oslo_vmware.api [None req-15854ee0-e9ba-4f62-bd72-7f4d6743be9f tempest-ServersAdminNegativeTestJSON-1152758389 tempest-ServersAdminNegativeTestJSON-1152758389-project-admin] Waiting for the task: (returnval){ [ 657.429019] env[63538]: value = "task-5100550" [ 657.429019] env[63538]: _type = "Task" [ 657.429019] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.446939] env[63538]: DEBUG oslo_vmware.api [None req-15854ee0-e9ba-4f62-bd72-7f4d6743be9f tempest-ServersAdminNegativeTestJSON-1152758389 tempest-ServersAdminNegativeTestJSON-1152758389-project-admin] Task: {'id': task-5100550, 'name': SuspendVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.559376] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100549, 'name': Rename_Task, 'duration_secs': 0.866522} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.559376] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 657.559376] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8610fb1-a404-49a0-b1f2-72ded2ede721 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.568720] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 657.568720] env[63538]: value = "task-5100551" [ 657.568720] env[63538]: _type = "Task" [ 657.568720] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.579547] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100551, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.638807] env[63538]: DEBUG nova.network.neutron [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Updating instance_info_cache with network_info: [{"id": "845bbf0e-88f5-474e-b875-0a12bfaebd27", "address": "fa:16:3e:fe:7c:1e", "network": {"id": "3fa1aaf4-0160-454f-aa6e-0a33051c92b8", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-221507218-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ae58d691f8e4b10a978587c2b8863e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap845bbf0e-88", "ovs_interfaceid": "845bbf0e-88f5-474e-b875-0a12bfaebd27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.684221] env[63538]: DEBUG nova.network.neutron [-] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.827172] env[63538]: DEBUG nova.compute.manager [req-ffa2d7b6-c81e-4422-aba8-ef8cfea2c9c6 req-96479a2f-e590-4ab3-8d4f-cc4bd9e0924a service 
nova] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Received event network-vif-deleted-6768804b-2e12-482a-b0c9-b886eaee9afb {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 657.913629] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.018s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.914317] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 657.918044] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.370s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.921739] env[63538]: INFO nova.compute.claims [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.947110] env[63538]: DEBUG oslo_vmware.api [None req-15854ee0-e9ba-4f62-bd72-7f4d6743be9f tempest-ServersAdminNegativeTestJSON-1152758389 tempest-ServersAdminNegativeTestJSON-1152758389-project-admin] Task: {'id': task-5100550, 'name': SuspendVM_Task} progress is 58%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.088793] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100551, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.143629] env[63538]: DEBUG oslo_concurrency.lockutils [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Releasing lock "refresh_cache-47500aaa-92fc-454c-badd-d6f8a2203083" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.143629] env[63538]: DEBUG nova.compute.manager [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Instance network_info: |[{"id": "845bbf0e-88f5-474e-b875-0a12bfaebd27", "address": "fa:16:3e:fe:7c:1e", "network": {"id": "3fa1aaf4-0160-454f-aa6e-0a33051c92b8", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-221507218-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ae58d691f8e4b10a978587c2b8863e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap845bbf0e-88", "ovs_interfaceid": "845bbf0e-88f5-474e-b875-0a12bfaebd27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 658.143833] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:7c:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '845bbf0e-88f5-474e-b875-0a12bfaebd27', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 658.154197] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Creating folder: Project (1ae58d691f8e4b10a978587c2b8863e1). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 658.155613] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-683d65b5-b137-4c96-aa18-5bd8514e32ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.176836] env[63538]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
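Editor's note: the WARNING above and the entries that follow show Folder.CreateFolder coming back with a DuplicateName fault, which the driver treats as "folder already exists" and simply reuses. Below is a minimal, self-contained sketch of that create-or-reuse pattern, assuming hypothetical stand-ins (FakeSession, DuplicateNameError, create_folder, find_folder); it is not the real oslo.vmware or nova API.

# Sketch of the create-or-reuse folder pattern seen around this point in the
# log: the server answers Folder.CreateFolder with a DuplicateName fault and
# the caller treats it as "folder already exists" and reuses it. Everything
# here (FakeSession, DuplicateNameError, create_folder, find_folder) is a
# hypothetical stand-in, not the real oslo.vmware / nova code.

class DuplicateNameError(Exception):
    """Raised when a child with the requested name already exists."""


class FakeSession:
    """In-memory stand-in for a vCenter session, keyed by (parent, name)."""

    def __init__(self):
        self._folders = {}

    def create_folder(self, parent_ref, name):
        key = (parent_ref, name)
        if key in self._folders:
            raise DuplicateNameError(name)
        self._folders[key] = f"folder-{len(self._folders) + 1}"
        return self._folders[key]

    def find_folder(self, parent_ref, name):
        return self._folders[(parent_ref, name)]


def ensure_folder(session, parent_ref, name):
    """Create the folder, or fall back to the existing one on DuplicateName."""
    try:
        return session.create_folder(parent_ref, name)
    except DuplicateNameError:
        # Mirrors the log: "Folder already exists: ..." followed by reuse.
        return session.find_folder(parent_ref, name)


if __name__ == "__main__":
    s = FakeSession()
    ref1 = ensure_folder(s, "group-v992234", "Project (example)")
    ref2 = ensure_folder(s, "group-v992234", "Project (example)")
    assert ref1 == ref2  # second call reuses the folder created by the first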
[ 658.176836] env[63538]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63538) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 658.177479] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Folder already exists: Project (1ae58d691f8e4b10a978587c2b8863e1). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 658.177766] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Creating folder: Instances. Parent ref: group-v992271. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 658.178043] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a767189-3905-4ca1-abc8-cad1bf255125 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.187447] env[63538]: INFO nova.compute.manager [-] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Took 1.52 seconds to deallocate network for instance. [ 658.190948] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Created folder: Instances in parent group-v992271. [ 658.191207] env[63538]: DEBUG oslo.service.loopingcall [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 658.195110] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 658.195538] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f43eeb69-2f32-4249-bfc9-8a1b4a090c79 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.218290] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 658.218290] env[63538]: value = "task-5100554" [ 658.218290] env[63538]: _type = "Task" [ 658.218290] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.227962] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100554, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.425386] env[63538]: DEBUG nova.compute.utils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 658.426893] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 658.427408] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 658.444434] env[63538]: DEBUG oslo_vmware.api [None req-15854ee0-e9ba-4f62-bd72-7f4d6743be9f tempest-ServersAdminNegativeTestJSON-1152758389 tempest-ServersAdminNegativeTestJSON-1152758389-project-admin] Task: {'id': task-5100550, 'name': SuspendVM_Task, 'duration_secs': 0.675522} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.444887] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-15854ee0-e9ba-4f62-bd72-7f4d6743be9f tempest-ServersAdminNegativeTestJSON-1152758389 tempest-ServersAdminNegativeTestJSON-1152758389-project-admin] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Suspended the VM {{(pid=63538) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 658.445122] env[63538]: DEBUG nova.compute.manager [None req-15854ee0-e9ba-4f62-bd72-7f4d6743be9f tempest-ServersAdminNegativeTestJSON-1152758389 tempest-ServersAdminNegativeTestJSON-1152758389-project-admin] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 658.446378] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd668905-b2ee-446f-8c5b-93c58d2ab4ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.518648] env[63538]: DEBUG nova.network.neutron [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Successfully updated port: dc774ce7-f5aa-452a-828d-e56e0339fe56 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 658.586680] env[63538]: DEBUG oslo_vmware.api [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100551, 'name': PowerOnVM_Task, 'duration_secs': 0.610183} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.588882] env[63538]: DEBUG nova.policy [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15bca987c1374dc4a427e80bf03f19ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '452b39ccca6b4fcba39b1e61f0508f14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 658.591281] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 658.592800] env[63538]: INFO nova.compute.manager [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Took 10.08 seconds to spawn the instance on the hypervisor. [ 658.593232] env[63538]: DEBUG nova.compute.manager [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 658.597403] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c95b7d7-2b0b-4b51-abbc-4a74722904b7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.696993] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.733602] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100554, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.844441] env[63538]: DEBUG nova.compute.manager [req-562760db-415b-4f0c-aa5d-abfc3cb3d8a4 req-4a31d36a-4a09-418c-a2dd-a1c991ff1e26 service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Received event network-changed-845bbf0e-88f5-474e-b875-0a12bfaebd27 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 658.844639] env[63538]: DEBUG nova.compute.manager [req-562760db-415b-4f0c-aa5d-abfc3cb3d8a4 req-4a31d36a-4a09-418c-a2dd-a1c991ff1e26 service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Refreshing instance network info cache due to event network-changed-845bbf0e-88f5-474e-b875-0a12bfaebd27. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 658.844854] env[63538]: DEBUG oslo_concurrency.lockutils [req-562760db-415b-4f0c-aa5d-abfc3cb3d8a4 req-4a31d36a-4a09-418c-a2dd-a1c991ff1e26 service nova] Acquiring lock "refresh_cache-47500aaa-92fc-454c-badd-d6f8a2203083" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.845082] env[63538]: DEBUG oslo_concurrency.lockutils [req-562760db-415b-4f0c-aa5d-abfc3cb3d8a4 req-4a31d36a-4a09-418c-a2dd-a1c991ff1e26 service nova] Acquired lock "refresh_cache-47500aaa-92fc-454c-badd-d6f8a2203083" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.845270] env[63538]: DEBUG nova.network.neutron [req-562760db-415b-4f0c-aa5d-abfc3cb3d8a4 req-4a31d36a-4a09-418c-a2dd-a1c991ff1e26 service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Refreshing network info cache for port 845bbf0e-88f5-474e-b875-0a12bfaebd27 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 658.933534] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 659.022925] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "refresh_cache-b5593b74-fe89-43f5-a8c6-e73159b4efac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.023095] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquired lock "refresh_cache-b5593b74-fe89-43f5-a8c6-e73159b4efac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.023251] env[63538]: DEBUG nova.network.neutron [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 659.133342] env[63538]: INFO nova.compute.manager [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Took 44.35 seconds to build instance. 
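Editor's note: the lockutils entries around here all follow the same named-lock discipline: a request acquires "compute_resources" or an instance-UUID lock, the log records how long it waited, and the release line records how long it was held. The sketch below mirrors that pattern with the standard library only; the lock registry, owner strings, and timing output are illustrative and are not oslo.concurrency's actual implementation.

# Plain-threading sketch of the named-lock acquire/wait/hold/release pattern
# in the surrounding entries. The real service uses oslo.concurrency's
# lockutils; this stand-in only reproduces the observable behaviour.

import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one lock object per lock name


@contextmanager
def named_lock(name, owner):
    lock = _locks[name]
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    acquired_at = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - acquired_at
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    with named_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.01)   # stand-in for the work done while claiming resources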
[ 659.223051] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.223404] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.238681] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100554, 'name': CreateVM_Task, 'duration_secs': 0.682949} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.238857] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 659.239820] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992278', 'volume_id': '9cb99482-8e0b-49d1-9249-8f411b389c09', 'name': 'volume-9cb99482-8e0b-49d1-9249-8f411b389c09', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '47500aaa-92fc-454c-badd-d6f8a2203083', 'attached_at': '', 'detached_at': '', 'volume_id': '9cb99482-8e0b-49d1-9249-8f411b389c09', 'serial': '9cb99482-8e0b-49d1-9249-8f411b389c09'}, 'delete_on_termination': True, 'attachment_id': '1bb2a248-daa4-46c1-94ab-dbf0f6147bc7', 'guest_format': None, 'mount_device': '/dev/sda', 'device_type': None, 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=63538) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 659.239820] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Root volume attach. 
Driver type: vmdk {{(pid=63538) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 659.240669] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c594ed-957a-4add-9bac-10271df68d70 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.263480] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f8ab4e-ccc9-4c9e-a31c-b35843405ee3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.278755] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1722572a-0556-438b-a128-0b5f1ab7a4e2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.282089] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Successfully created port: eb08b565-8194-4325-b89d-38b0f6615179 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.289309] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-e8809222-6162-4c8a-bb22-5a911b88984e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.294593] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Acquiring lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.294844] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.305017] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Waiting for the task: (returnval){ [ 659.305017] env[63538]: value = "task-5100555" [ 659.305017] env[63538]: _type = "Task" [ 659.305017] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.319615] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100555, 'name': RelocateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.571989] env[63538]: DEBUG nova.network.neutron [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 659.636814] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f2bd0d1-d31d-4fcc-8de9-2ed3d76093b8 tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "a6bb8713-6b00-4a43-96b7-a84ee39d790d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.537s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.700969] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1733aa6-8638-48de-a37f-801ce35ef688 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.722032] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1120cc-86e2-41bd-a7ab-591a6a2d3d9f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.764909] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Acquiring lock "102c0463-fb64-4dda-914c-b98c8e9991ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.765171] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Lock "102c0463-fb64-4dda-914c-b98c8e9991ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.765404] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Acquiring lock "102c0463-fb64-4dda-914c-b98c8e9991ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.765631] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Lock "102c0463-fb64-4dda-914c-b98c8e9991ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.765825] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 
tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Lock "102c0463-fb64-4dda-914c-b98c8e9991ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.772381] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a5d34f-1b5b-4e19-93fc-22ebdb43e2bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.776070] env[63538]: INFO nova.compute.manager [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Terminating instance [ 659.782140] env[63538]: DEBUG nova.compute.manager [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 659.782380] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 659.783994] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c2ca97-6415-4084-9c7a-3e5eb0e8eeb1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.791307] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2bbe0f-132d-4475-8312-e4bad9e2cfa0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.799274] env[63538]: INFO nova.compute.manager [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Detaching volume dc9f1261-2253-4cc1-91b1-e12aa81cc11d [ 659.802288] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 659.802288] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27a3060b-1319-43e4-8c8f-a07f2e27b253 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.821462] env[63538]: DEBUG nova.compute.provider_tree [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.823308] env[63538]: DEBUG 
oslo_vmware.api [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Waiting for the task: (returnval){ [ 659.823308] env[63538]: value = "task-5100556" [ 659.823308] env[63538]: _type = "Task" [ 659.823308] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.833333] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100555, 'name': RelocateVM_Task} progress is 42%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.843493] env[63538]: DEBUG oslo_vmware.api [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100556, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.844803] env[63538]: INFO nova.virt.block_device [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Attempting to driver detach volume dc9f1261-2253-4cc1-91b1-e12aa81cc11d from mountpoint /dev/sdb [ 659.845054] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Volume detach. 
Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 659.845316] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992302', 'volume_id': 'dc9f1261-2253-4cc1-91b1-e12aa81cc11d', 'name': 'volume-dc9f1261-2253-4cc1-91b1-e12aa81cc11d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bf54098e-91a8-403f-a6fe-b58a62daaadb', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc9f1261-2253-4cc1-91b1-e12aa81cc11d', 'serial': 'dc9f1261-2253-4cc1-91b1-e12aa81cc11d'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 659.846148] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-636a81e4-9791-439f-a130-c9743d20ee98 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.873578] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668cd7af-21b8-4428-82f9-f4e501d7acff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.879774] env[63538]: DEBUG nova.network.neutron [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Updating instance_info_cache with network_info: [{"id": "dc774ce7-f5aa-452a-828d-e56e0339fe56", "address": "fa:16:3e:3c:f8:6c", "network": {"id": "c4ac48b0-a0c2-4910-bcb9-3f42d415030f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2107782775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdfc96ac41be43f9ba0596444eb75737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc774ce7-f5", "ovs_interfaceid": "dc774ce7-f5aa-452a-828d-e56e0339fe56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.884390] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3ddd11-010c-48d3-9cb7-6a2cf7845852 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.909273] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4797f3-c6ce-4612-9a47-296e02aa689a {{(pid=63538) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.930858] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] The volume has not been displaced from its original location: [datastore1] volume-dc9f1261-2253-4cc1-91b1-e12aa81cc11d/volume-dc9f1261-2253-4cc1-91b1-e12aa81cc11d.vmdk. No consolidation needed. {{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 659.936494] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Reconfiguring VM instance instance-0000000e to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 659.936913] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0dbc988-411e-4c2c-83c7-f7f9d12c4ee5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.951938] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 659.963811] env[63538]: DEBUG oslo_vmware.api [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Waiting for the task: (returnval){ [ 659.963811] env[63538]: value = "task-5100557" [ 659.963811] env[63538]: _type = "Task" [ 659.963811] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.975362] env[63538]: DEBUG oslo_vmware.api [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Task: {'id': task-5100557, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.989835] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 659.989952] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 659.990141] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.990271] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 659.990424] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.990607] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 659.990903] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 659.991140] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 659.991327] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 659.991508] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 659.991707] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 659.993054] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e556e19-50bc-40dd-9a84-5c1f91bf3040 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.005235] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126f5fef-e4db-489b-a8a7-0bd4eee24234 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.143295] env[63538]: DEBUG nova.compute.manager [None req-0b5f79ad-08b9-4663-bb67-2f73c7234810 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cebb39b7-1e2d-4460-8281-22a75355f4d2] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 660.322233] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100555, 'name': RelocateVM_Task} progress is 53%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.328022] env[63538]: DEBUG nova.scheduler.client.report [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 660.339742] env[63538]: DEBUG nova.compute.manager [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Received event network-vif-plugged-dc774ce7-f5aa-452a-828d-e56e0339fe56 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 660.340230] env[63538]: DEBUG oslo_concurrency.lockutils [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] Acquiring lock "b5593b74-fe89-43f5-a8c6-e73159b4efac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.341731] env[63538]: DEBUG oslo_concurrency.lockutils [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] Lock "b5593b74-fe89-43f5-a8c6-e73159b4efac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.341731] env[63538]: DEBUG oslo_concurrency.lockutils [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] Lock "b5593b74-fe89-43f5-a8c6-e73159b4efac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.341731] env[63538]: DEBUG nova.compute.manager [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] No waiting events found dispatching network-vif-plugged-dc774ce7-f5aa-452a-828d-e56e0339fe56 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 660.341731] env[63538]: WARNING nova.compute.manager [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Received unexpected event network-vif-plugged-dc774ce7-f5aa-452a-828d-e56e0339fe56 for instance with vm_state building and task_state spawning. 
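Editor's note: the nova.virt.hardware entries a few lines above walk through CPU topology selection: with flavor and image limits/preferences of 0:0:0 and a 1-vCPU flavor, the only possible topology is 1 socket x 1 core x 1 thread. The sketch below is a simplified stand-in for that enumeration, not nova/virt/hardware.py itself.

# Illustrative enumeration of (sockets, cores, threads) combinations whose
# product equals the vCPU count, subject to per-dimension maxima, matching
# what the hardware.py debug entries describe.

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies


if __name__ == "__main__":
    # For 1 vCPU the only candidate is 1 socket, 1 core, 1 thread, as logged.
    print(possible_topologies(1))   # [VirtCPUTopology(sockets=1, cores=1, threads=1)]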
[ 660.341731] env[63538]: DEBUG nova.compute.manager [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Received event network-changed-dc774ce7-f5aa-452a-828d-e56e0339fe56 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 660.341993] env[63538]: DEBUG nova.compute.manager [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Refreshing instance network info cache due to event network-changed-dc774ce7-f5aa-452a-828d-e56e0339fe56. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 660.341993] env[63538]: DEBUG oslo_concurrency.lockutils [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] Acquiring lock "refresh_cache-b5593b74-fe89-43f5-a8c6-e73159b4efac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.343326] env[63538]: DEBUG nova.network.neutron [req-562760db-415b-4f0c-aa5d-abfc3cb3d8a4 req-4a31d36a-4a09-418c-a2dd-a1c991ff1e26 service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Updated VIF entry in instance network info cache for port 845bbf0e-88f5-474e-b875-0a12bfaebd27. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 660.343500] env[63538]: DEBUG nova.network.neutron [req-562760db-415b-4f0c-aa5d-abfc3cb3d8a4 req-4a31d36a-4a09-418c-a2dd-a1c991ff1e26 service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Updating instance_info_cache with network_info: [{"id": "845bbf0e-88f5-474e-b875-0a12bfaebd27", "address": "fa:16:3e:fe:7c:1e", "network": {"id": "3fa1aaf4-0160-454f-aa6e-0a33051c92b8", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-221507218-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ae58d691f8e4b10a978587c2b8863e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap845bbf0e-88", "ovs_interfaceid": "845bbf0e-88f5-474e-b875-0a12bfaebd27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.348275] env[63538]: DEBUG oslo_vmware.api [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100556, 'name': PowerOffVM_Task, 'duration_secs': 0.267954} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.349274] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 660.349274] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 660.349367] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2cedb80-4caf-41ec-926e-d7b6c3863977 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.384744] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Releasing lock "refresh_cache-b5593b74-fe89-43f5-a8c6-e73159b4efac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.384744] env[63538]: DEBUG nova.compute.manager [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Instance network_info: |[{"id": "dc774ce7-f5aa-452a-828d-e56e0339fe56", "address": "fa:16:3e:3c:f8:6c", "network": {"id": "c4ac48b0-a0c2-4910-bcb9-3f42d415030f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2107782775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdfc96ac41be43f9ba0596444eb75737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc774ce7-f5", "ovs_interfaceid": "dc774ce7-f5aa-452a-828d-e56e0339fe56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 660.384881] env[63538]: DEBUG oslo_concurrency.lockutils [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] Acquired lock "refresh_cache-b5593b74-fe89-43f5-a8c6-e73159b4efac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.384881] env[63538]: DEBUG nova.network.neutron [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] [instance: 
b5593b74-fe89-43f5-a8c6-e73159b4efac] Refreshing network info cache for port dc774ce7-f5aa-452a-828d-e56e0339fe56 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 660.385019] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:f8:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c894ab55-c869-4530-9702-cb46d173ce94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc774ce7-f5aa-452a-828d-e56e0339fe56', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 660.394480] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Creating folder: Project (cdfc96ac41be43f9ba0596444eb75737). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 660.395165] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3620e39-1839-42d0-8843-22c17a230a09 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.409161] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Created folder: Project (cdfc96ac41be43f9ba0596444eb75737) in parent group-v992234. [ 660.409326] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Creating folder: Instances. Parent ref: group-v992306. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 660.409607] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f22e5a9b-84fe-4ab8-8ff0-7cea5923a7ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.421788] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Created folder: Instances in parent group-v992306. [ 660.422184] env[63538]: DEBUG oslo.service.loopingcall [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 660.422453] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 660.422735] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74c0a245-1e55-4e21-a958-2d488e79709c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.441276] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 660.441894] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 660.442162] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Deleting the datastore file [datastore1] 102c0463-fb64-4dda-914c-b98c8e9991ad {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 660.442861] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58e936d6-3f52-4bc4-aa18-7c5a70115700 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.448255] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 660.448255] env[63538]: value = "task-5100561" [ 660.448255] env[63538]: _type = "Task" [ 660.448255] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.453795] env[63538]: DEBUG oslo_vmware.api [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Waiting for the task: (returnval){ [ 660.453795] env[63538]: value = "task-5100562" [ 660.453795] env[63538]: _type = "Task" [ 660.453795] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.460817] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100561, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.467827] env[63538]: DEBUG oslo_vmware.api [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100562, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.479980] env[63538]: DEBUG oslo_vmware.api [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Task: {'id': task-5100557, 'name': ReconfigVM_Task, 'duration_secs': 0.317947} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.480297] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Reconfigured VM instance instance-0000000e to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 660.487913] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f32c627-9d77-46b8-bc4a-6fe1ba4a1979 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.509379] env[63538]: DEBUG oslo_vmware.api [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Waiting for the task: (returnval){ [ 660.509379] env[63538]: value = "task-5100563" [ 660.509379] env[63538]: _type = "Task" [ 660.509379] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.523946] env[63538]: DEBUG oslo_vmware.api [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Task: {'id': task-5100563, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.648967] env[63538]: DEBUG nova.compute.manager [None req-0b5f79ad-08b9-4663-bb67-2f73c7234810 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cebb39b7-1e2d-4460-8281-22a75355f4d2] Instance disappeared before build. {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2440}} [ 660.821595] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100555, 'name': RelocateVM_Task} progress is 65%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.836117] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.918s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.839079] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 660.842825] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.998s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.844707] env[63538]: INFO nova.compute.claims [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.850869] env[63538]: DEBUG oslo_concurrency.lockutils [req-562760db-415b-4f0c-aa5d-abfc3cb3d8a4 req-4a31d36a-4a09-418c-a2dd-a1c991ff1e26 service nova] Releasing lock "refresh_cache-47500aaa-92fc-454c-badd-d6f8a2203083" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.850869] env[63538]: DEBUG nova.compute.manager [req-562760db-415b-4f0c-aa5d-abfc3cb3d8a4 req-4a31d36a-4a09-418c-a2dd-a1c991ff1e26 service nova] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Received event network-vif-deleted-7b43dc6d-d5bd-406d-8860-46abe9635ab5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 660.968548] env[63538]: DEBUG oslo_vmware.api [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Task: {'id': task-5100562, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289809} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.976823] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 660.977257] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 660.978369] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 660.978369] env[63538]: INFO nova.compute.manager [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Took 1.20 seconds to destroy the instance on the hypervisor. 
[ 660.978369] env[63538]: DEBUG oslo.service.loopingcall [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 660.978369] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100561, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.978369] env[63538]: DEBUG nova.compute.manager [-] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 660.978577] env[63538]: DEBUG nova.network.neutron [-] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 661.025691] env[63538]: DEBUG oslo_vmware.api [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Task: {'id': task-5100563, 'name': ReconfigVM_Task, 'duration_secs': 0.198128} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.026037] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992302', 'volume_id': 'dc9f1261-2253-4cc1-91b1-e12aa81cc11d', 'name': 'volume-dc9f1261-2253-4cc1-91b1-e12aa81cc11d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bf54098e-91a8-403f-a6fe-b58a62daaadb', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc9f1261-2253-4cc1-91b1-e12aa81cc11d', 'serial': 'dc9f1261-2253-4cc1-91b1-e12aa81cc11d'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 661.169523] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0b5f79ad-08b9-4663-bb67-2f73c7234810 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "cebb39b7-1e2d-4460-8281-22a75355f4d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.342s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.301419] env[63538]: DEBUG nova.network.neutron [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Updated VIF entry in instance network info cache for port dc774ce7-f5aa-452a-828d-e56e0339fe56. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 661.301713] env[63538]: DEBUG nova.network.neutron [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Updating instance_info_cache with network_info: [{"id": "dc774ce7-f5aa-452a-828d-e56e0339fe56", "address": "fa:16:3e:3c:f8:6c", "network": {"id": "c4ac48b0-a0c2-4910-bcb9-3f42d415030f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2107782775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdfc96ac41be43f9ba0596444eb75737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc774ce7-f5", "ovs_interfaceid": "dc774ce7-f5aa-452a-828d-e56e0339fe56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.324777] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100555, 'name': RelocateVM_Task} progress is 78%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.351281] env[63538]: DEBUG nova.compute.utils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 661.356334] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 661.356374] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 661.431706] env[63538]: DEBUG nova.policy [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15bca987c1374dc4a427e80bf03f19ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '452b39ccca6b4fcba39b1e61f0508f14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 661.467863] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100561, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.607687] env[63538]: DEBUG nova.objects.instance [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Lazy-loading 'flavor' on Instance uuid bf54098e-91a8-403f-a6fe-b58a62daaadb {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 661.674044] env[63538]: DEBUG nova.compute.manager [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 661.805406] env[63538]: DEBUG oslo_concurrency.lockutils [req-09db54ea-dba4-4346-936c-24e49a2efd46 req-a99d26c4-12a9-47b6-9632-d8184900eb73 service nova] Releasing lock "refresh_cache-b5593b74-fe89-43f5-a8c6-e73159b4efac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.824835] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100555, 'name': RelocateVM_Task} progress is 92%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.840738] env[63538]: DEBUG oslo_concurrency.lockutils [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquiring lock "a6bb8713-6b00-4a43-96b7-a84ee39d790d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.841030] env[63538]: DEBUG oslo_concurrency.lockutils [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "a6bb8713-6b00-4a43-96b7-a84ee39d790d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.841347] env[63538]: DEBUG oslo_concurrency.lockutils [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquiring lock "a6bb8713-6b00-4a43-96b7-a84ee39d790d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.841694] env[63538]: DEBUG oslo_concurrency.lockutils [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "a6bb8713-6b00-4a43-96b7-a84ee39d790d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.841818] env[63538]: DEBUG oslo_concurrency.lockutils [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "a6bb8713-6b00-4a43-96b7-a84ee39d790d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.844454] env[63538]: INFO nova.compute.manager [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Terminating instance [ 661.846688] env[63538]: DEBUG nova.compute.manager [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 661.847027] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 661.848728] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f11e75e-dc83-41e1-bdc2-eacd3a2a4c13 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.857558] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 661.867164] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 661.869685] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d447c74d-df24-46e1-80d1-1ff450e0b661 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.879358] env[63538]: DEBUG oslo_vmware.api [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 661.879358] env[63538]: value = "task-5100564" [ 661.879358] env[63538]: _type = "Task" [ 661.879358] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.892233] env[63538]: DEBUG oslo_vmware.api [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100564, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.967901] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100561, 'name': CreateVM_Task, 'duration_secs': 1.464836} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.968125] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 661.969265] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.969493] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.969797] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 661.970085] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1525dda1-5c16-4548-8d16-eefa1dbaac92 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.982381] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 661.982381] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fd25c0-0e19-aeb1-0c99-9dc5d5148aab" [ 661.982381] env[63538]: _type = "Task" [ 661.982381] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.993847] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fd25c0-0e19-aeb1-0c99-9dc5d5148aab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.214113] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Successfully created port: 42827ce2-838a-4a4a-b29a-40f1d29e7a3e {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 662.214629] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.308904] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Successfully updated port: eb08b565-8194-4325-b89d-38b0f6615179 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 662.327892] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100555, 'name': RelocateVM_Task} progress is 97%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.392411] env[63538]: DEBUG oslo_vmware.api [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100564, 'name': PowerOffVM_Task, 'duration_secs': 0.28889} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.393040] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 662.393040] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 662.393575] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09d5cd9d-441b-46ce-bd08-c9c846ca5c16 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.464950] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 662.465501] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 662.465906] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Deleting the datastore file [datastore1] a6bb8713-6b00-4a43-96b7-a84ee39d790d {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 662.466029] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-181bd8de-52ef-4fac-a08f-d4cc6d757c36 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.477625] env[63538]: DEBUG oslo_vmware.api [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for the task: (returnval){ [ 662.477625] env[63538]: value = "task-5100566" [ 662.477625] env[63538]: _type = "Task" [ 662.477625] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.490941] env[63538]: DEBUG oslo_vmware.api [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100566, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.500361] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fd25c0-0e19-aeb1-0c99-9dc5d5148aab, 'name': SearchDatastore_Task, 'duration_secs': 0.026433} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.500721] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.500961] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 662.501205] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.502121] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.502121] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 662.502121] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81ebad00-570a-4699-bb77-b08bec037f71 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.511366] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 662.511564] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 662.512609] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23f00c9a-66aa-4f0b-9b4f-3e97521d18c0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.518941] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 662.518941] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529d1ef8-b7d0-e7f8-5d66-cd3681efeeb6" [ 662.518941] env[63538]: _type = "Task" [ 662.518941] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.531575] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529d1ef8-b7d0-e7f8-5d66-cd3681efeeb6, 'name': SearchDatastore_Task} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.532846] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-685a2fc9-0dda-45ea-8ea8-6b31f21eadc8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.541097] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 662.541097] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528c7c8d-e3ce-f43a-55cc-0c0c54e25bd3" [ 662.541097] env[63538]: _type = "Task" [ 662.541097] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.541357] env[63538]: DEBUG nova.network.neutron [-] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.552909] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528c7c8d-e3ce-f43a-55cc-0c0c54e25bd3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.618059] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b86ef4f6-b5da-4b72-98cc-7b168f05f2b7 tempest-VolumesAssistedSnapshotsTest-1896584251 tempest-VolumesAssistedSnapshotsTest-1896584251-project-admin] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.323s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.626904] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88264ef9-ab59-44c9-9da4-dc55f03e479f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.637779] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb81258-2c86-48b3-9fcb-eed5e57c8196 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.680060] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4410328-12fe-4bc1-babc-46eba8c08803 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.690450] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ca9c2e-39ae-403d-9214-dadfe966a65e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.709724] env[63538]: DEBUG nova.compute.provider_tree [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.815443] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "refresh_cache-ee9fe572-7a17-46db-8330-4b6f632c6b2c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.815443] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquired lock "refresh_cache-ee9fe572-7a17-46db-8330-4b6f632c6b2c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.815443] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 662.831239] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100555, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.878790] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 662.886191] env[63538]: DEBUG nova.compute.manager [req-8376b949-c6e4-4491-a227-ff7c6e3baf40 req-20a88812-be3d-4f90-abfb-99efeaba5e57 service nova] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Received event network-vif-deleted-4fd02a9a-ba01-4841-a942-ca1b96503c0e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 662.924333] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 662.926213] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 662.926213] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.926213] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 662.926213] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.926213] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 662.926897] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 662.926897] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 662.926897] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 662.926897] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 662.926897] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 662.928129] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e86040-4b16-4d06-a5d4-c22494aa68c1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.937815] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b015ab-d475-4a69-ad5d-2d4e2ce5cd3c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.957644] env[63538]: DEBUG oslo_concurrency.lockutils [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Acquiring lock "543875b5-195a-476d-a0b4-3211ceefa27f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.957833] env[63538]: DEBUG oslo_concurrency.lockutils [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Lock "543875b5-195a-476d-a0b4-3211ceefa27f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.958117] env[63538]: DEBUG oslo_concurrency.lockutils [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Acquiring lock 
"543875b5-195a-476d-a0b4-3211ceefa27f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.958340] env[63538]: DEBUG oslo_concurrency.lockutils [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Lock "543875b5-195a-476d-a0b4-3211ceefa27f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.958898] env[63538]: DEBUG oslo_concurrency.lockutils [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Lock "543875b5-195a-476d-a0b4-3211ceefa27f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.964285] env[63538]: INFO nova.compute.manager [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Terminating instance [ 662.965901] env[63538]: DEBUG nova.compute.manager [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 662.966155] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 662.967683] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abad85c5-6775-48a8-8638-44c0d9cb181c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.975669] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 662.976500] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3a6f234-c42d-420c-942a-42c575544c02 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.983491] env[63538]: DEBUG oslo_vmware.api [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Waiting for the task: (returnval){ [ 662.983491] env[63538]: value = "task-5100568" [ 662.983491] env[63538]: _type = "Task" [ 662.983491] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.986779] env[63538]: DEBUG oslo_vmware.api [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Task: {'id': task-5100566, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15905} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.990359] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 662.991055] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 662.991055] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 662.991055] env[63538]: INFO nova.compute.manager [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 662.991327] env[63538]: DEBUG oslo.service.loopingcall [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 662.994485] env[63538]: DEBUG nova.compute.manager [-] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 662.994485] env[63538]: DEBUG nova.network.neutron [-] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 663.002116] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "46e2c1f4-edf7-45d6-ba77-c872005fcf1b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.002116] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "46e2c1f4-edf7-45d6-ba77-c872005fcf1b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.004960] env[63538]: DEBUG oslo_vmware.api [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100568, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.039467] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "e4b94aa7-7434-4a6e-b6d3-ed02315c435f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.039467] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "e4b94aa7-7434-4a6e-b6d3-ed02315c435f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.048957] env[63538]: INFO nova.compute.manager [-] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Took 2.07 seconds to deallocate network for instance. [ 663.058298] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528c7c8d-e3ce-f43a-55cc-0c0c54e25bd3, 'name': SearchDatastore_Task, 'duration_secs': 0.012492} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.061013] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.061300] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] b5593b74-fe89-43f5-a8c6-e73159b4efac/b5593b74-fe89-43f5-a8c6-e73159b4efac.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 663.061596] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7bd7bdcc-b783-420f-a0cc-33e45a9fc218 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.073153] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 663.073153] env[63538]: value = "task-5100569" [ 663.073153] env[63538]: _type = "Task" [ 663.073153] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.084258] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100569, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.212796] env[63538]: DEBUG nova.scheduler.client.report [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 663.251875] env[63538]: DEBUG nova.compute.manager [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Received event network-vif-plugged-eb08b565-8194-4325-b89d-38b0f6615179 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 663.252138] env[63538]: DEBUG oslo_concurrency.lockutils [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] Acquiring lock "ee9fe572-7a17-46db-8330-4b6f632c6b2c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.252379] env[63538]: DEBUG oslo_concurrency.lockutils [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] Lock "ee9fe572-7a17-46db-8330-4b6f632c6b2c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.252495] env[63538]: DEBUG oslo_concurrency.lockutils [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] Lock "ee9fe572-7a17-46db-8330-4b6f632c6b2c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.252624] env[63538]: DEBUG nova.compute.manager [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] No waiting events found dispatching network-vif-plugged-eb08b565-8194-4325-b89d-38b0f6615179 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 663.252818] env[63538]: WARNING nova.compute.manager [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Received unexpected event network-vif-plugged-eb08b565-8194-4325-b89d-38b0f6615179 for instance with vm_state building and task_state spawning. 
[ 663.252977] env[63538]: DEBUG nova.compute.manager [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Received event network-changed-eb08b565-8194-4325-b89d-38b0f6615179 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 663.253267] env[63538]: DEBUG nova.compute.manager [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Refreshing instance network info cache due to event network-changed-eb08b565-8194-4325-b89d-38b0f6615179. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 663.253455] env[63538]: DEBUG oslo_concurrency.lockutils [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] Acquiring lock "refresh_cache-ee9fe572-7a17-46db-8330-4b6f632c6b2c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.334292] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100555, 'name': RelocateVM_Task} progress is 98%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.359450] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 663.498618] env[63538]: DEBUG oslo_vmware.api [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100568, 'name': PowerOffVM_Task, 'duration_secs': 0.295409} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.499890] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 663.499890] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 663.499890] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cee0963e-9ea5-482c-a37f-6efec09e76b2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.562937] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.587118] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100569, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.591293] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 663.591698] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 663.592054] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Deleting the datastore file [datastore1] 543875b5-195a-476d-a0b4-3211ceefa27f {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 663.593612] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Updating instance_info_cache with network_info: [{"id": "eb08b565-8194-4325-b89d-38b0f6615179", "address": "fa:16:3e:29:79:e8", "network": {"id": "1ffc7eb0-0516-45ff-a0e3-3f2887487b78", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550035383-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452b39ccca6b4fcba39b1e61f0508f14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb08b565-81", "ovs_interfaceid": "eb08b565-8194-4325-b89d-38b0f6615179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.594977] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3299db0e-8ba8-4e08-8b9f-cbf51eac608e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.604354] env[63538]: DEBUG oslo_vmware.api [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Waiting for the task: (returnval){ [ 663.604354] env[63538]: value = "task-5100571" [ 663.604354] env[63538]: _type = "Task" [ 663.604354] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.614420] env[63538]: DEBUG oslo_vmware.api [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100571, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.718066] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.875s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.718508] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 663.722096] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.820s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.724503] env[63538]: INFO nova.compute.claims [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 663.827060] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100555, 'name': RelocateVM_Task} progress is 98%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.866905] env[63538]: DEBUG nova.network.neutron [-] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.084290] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100569, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666197} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.085350] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] b5593b74-fe89-43f5-a8c6-e73159b4efac/b5593b74-fe89-43f5-a8c6-e73159b4efac.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 664.085727] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 664.086104] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be747d19-20dc-4e14-9578-bfa190edd89a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.096040] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 664.096040] env[63538]: value = "task-5100572" [ 664.096040] env[63538]: _type = "Task" [ 664.096040] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.099526] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Releasing lock "refresh_cache-ee9fe572-7a17-46db-8330-4b6f632c6b2c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.100137] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Instance network_info: |[{"id": "eb08b565-8194-4325-b89d-38b0f6615179", "address": "fa:16:3e:29:79:e8", "network": {"id": "1ffc7eb0-0516-45ff-a0e3-3f2887487b78", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550035383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452b39ccca6b4fcba39b1e61f0508f14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb08b565-81", "ovs_interfaceid": "eb08b565-8194-4325-b89d-38b0f6615179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 664.100623] env[63538]: DEBUG oslo_concurrency.lockutils [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] Acquired lock "refresh_cache-ee9fe572-7a17-46db-8330-4b6f632c6b2c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.100971] env[63538]: DEBUG nova.network.neutron [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Refreshing network info cache for port eb08b565-8194-4325-b89d-38b0f6615179 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 664.104402] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:79:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f256cfee-512d-4192-9aca-6750fdb1cd4c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb08b565-8194-4325-b89d-38b0f6615179', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 664.116230] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 
tempest-ListServersNegativeTestJSON-685275572-project-member] Creating folder: Project (452b39ccca6b4fcba39b1e61f0508f14). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 664.121187] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed5f1f2e-801b-49ed-a5ca-a3ee3f4a34a0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.134143] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100572, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.141862] env[63538]: DEBUG oslo_vmware.api [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Task: {'id': task-5100571, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.346918} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.143642] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 664.144275] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 664.144275] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 664.144275] env[63538]: INFO nova.compute.manager [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Took 1.18 seconds to destroy the instance on the hypervisor. [ 664.144496] env[63538]: DEBUG oslo.service.loopingcall [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 664.144724] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Created folder: Project (452b39ccca6b4fcba39b1e61f0508f14) in parent group-v992234. 
[ 664.144998] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Creating folder: Instances. Parent ref: group-v992309. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 664.145205] env[63538]: DEBUG nova.compute.manager [-] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 664.145339] env[63538]: DEBUG nova.network.neutron [-] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 664.147137] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de32c72b-9d76-4bf5-8d10-a66ad7eeb31a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.159458] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Created folder: Instances in parent group-v992309. [ 664.160010] env[63538]: DEBUG oslo.service.loopingcall [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 664.160010] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 664.160854] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75beceed-35fd-4e29-be4a-eb6a61ea0e5b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.183543] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 664.183543] env[63538]: value = "task-5100575" [ 664.183543] env[63538]: _type = "Task" [ 664.183543] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.184433] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Successfully updated port: 42827ce2-838a-4a4a-b29a-40f1d29e7a3e {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 664.197146] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100575, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.233027] env[63538]: DEBUG nova.compute.utils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 664.234436] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 664.234748] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 664.326379] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100555, 'name': RelocateVM_Task, 'duration_secs': 4.791648} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.327132] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Volume attach. 
Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 664.327360] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992278', 'volume_id': '9cb99482-8e0b-49d1-9249-8f411b389c09', 'name': 'volume-9cb99482-8e0b-49d1-9249-8f411b389c09', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '47500aaa-92fc-454c-badd-d6f8a2203083', 'attached_at': '', 'detached_at': '', 'volume_id': '9cb99482-8e0b-49d1-9249-8f411b389c09', 'serial': '9cb99482-8e0b-49d1-9249-8f411b389c09'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 664.328272] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07442585-8f94-4bc4-a5db-ff0ee54a25b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.346259] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c906bb3-c2c4-4077-b5f0-e05371800ec0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.350618] env[63538]: DEBUG nova.policy [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15bca987c1374dc4a427e80bf03f19ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '452b39ccca6b4fcba39b1e61f0508f14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 664.373858] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] volume-9cb99482-8e0b-49d1-9249-8f411b389c09/volume-9cb99482-8e0b-49d1-9249-8f411b389c09.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.376887] env[63538]: INFO nova.compute.manager [-] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Took 1.38 seconds to deallocate network for instance. 
[ 664.378026] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0703c180-1555-4e8b-a42d-64c3e0967c07 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.408050] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Waiting for the task: (returnval){ [ 664.408050] env[63538]: value = "task-5100576" [ 664.408050] env[63538]: _type = "Task" [ 664.408050] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.414774] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100576, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.523825] env[63538]: DEBUG nova.network.neutron [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Updated VIF entry in instance network info cache for port eb08b565-8194-4325-b89d-38b0f6615179. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 664.524159] env[63538]: DEBUG nova.network.neutron [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Updating instance_info_cache with network_info: [{"id": "eb08b565-8194-4325-b89d-38b0f6615179", "address": "fa:16:3e:29:79:e8", "network": {"id": "1ffc7eb0-0516-45ff-a0e3-3f2887487b78", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550035383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452b39ccca6b4fcba39b1e61f0508f14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb08b565-81", "ovs_interfaceid": "eb08b565-8194-4325-b89d-38b0f6615179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.608560] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100572, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07014} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.608884] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 664.609731] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8239d840-572e-4c73-b855-ca645bb9325c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.638414] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] b5593b74-fe89-43f5-a8c6-e73159b4efac/b5593b74-fe89-43f5-a8c6-e73159b4efac.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.638708] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48e82092-d220-43ea-aa57-fd602659dec8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.664888] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 664.664888] env[63538]: value = "task-5100577" [ 664.664888] env[63538]: _type = "Task" [ 664.664888] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.675975] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100577, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.691642] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "refresh_cache-5421e135-9581-4f81-aa8a-2a604887a1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.691964] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquired lock "refresh_cache-5421e135-9581-4f81-aa8a-2a604887a1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.692299] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 664.701582] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100575, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.878881] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 664.898962] env[63538]: DEBUG oslo_concurrency.lockutils [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.922748] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100576, 'name': ReconfigVM_Task, 'duration_secs': 0.482089} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.922748] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Reconfigured VM instance instance-00000019 to attach disk [datastore2] volume-9cb99482-8e0b-49d1-9249-8f411b389c09/volume-9cb99482-8e0b-49d1-9249-8f411b389c09.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 664.928037] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0e753c8-c120-4c53-a710-2ad94aeb2be6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.945424] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Waiting for the task: (returnval){ [ 664.945424] env[63538]: value = "task-5100578" [ 664.945424] env[63538]: _type = "Task" [ 664.945424] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.957040] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100578, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.027599] env[63538]: DEBUG oslo_concurrency.lockutils [req-00c94926-de7c-4d46-8721-695126cd4d25 req-f48bcfc4-9c7a-4402-a0b7-897616aa2e23 service nova] Releasing lock "refresh_cache-ee9fe572-7a17-46db-8330-4b6f632c6b2c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.181088] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100577, 'name': ReconfigVM_Task, 'duration_secs': 0.345078} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.183611] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Reconfigured VM instance instance-0000001a to attach disk [datastore2] b5593b74-fe89-43f5-a8c6-e73159b4efac/b5593b74-fe89-43f5-a8c6-e73159b4efac.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.185498] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f33b7ed-1988-4f1d-97d2-65ce2c1f720f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.202885] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 665.202885] env[63538]: value = "task-5100579" [ 665.202885] env[63538]: _type = "Task" [ 665.202885] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.206671] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100575, 'name': CreateVM_Task, 'duration_secs': 0.558846} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.209786] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 665.210508] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.210682] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.210983] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 665.211814] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2c3dd8f-0bff-4fa6-a668-6edeb1e89a2a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.221920] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] 
Waiting for the task: (returnval){ [ 665.221920] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52243d5e-e248-8fa5-8e4f-9046a39f873c" [ 665.221920] env[63538]: _type = "Task" [ 665.221920] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.222710] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100579, 'name': Rename_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.234131] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Successfully created port: 0be458b1-bd73-4d0f-8fd8-bcfec3c520c7 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.243453] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52243d5e-e248-8fa5-8e4f-9046a39f873c, 'name': SearchDatastore_Task, 'duration_secs': 0.010263} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.245324] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.245497] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 665.245724] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.245910] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.246537] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.248238] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3525922c-f16c-41ae-8ec8-5f1a8d46d3b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.259165] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.259165] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 665.262384] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23adb650-6b6d-4d7b-aad1-60d2d755ff9f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.267775] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 665.271346] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 665.271346] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523caccd-c79c-1d51-145b-74af47266c54" [ 665.271346] env[63538]: _type = "Task" [ 665.271346] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.281970] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523caccd-c79c-1d51-145b-74af47266c54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.459189] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100578, 'name': ReconfigVM_Task, 'duration_secs': 0.136487} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.459937] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992278', 'volume_id': '9cb99482-8e0b-49d1-9249-8f411b389c09', 'name': 'volume-9cb99482-8e0b-49d1-9249-8f411b389c09', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '47500aaa-92fc-454c-badd-d6f8a2203083', 'attached_at': '', 'detached_at': '', 'volume_id': '9cb99482-8e0b-49d1-9249-8f411b389c09', 'serial': '9cb99482-8e0b-49d1-9249-8f411b389c09'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 665.464264] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f38d7da-d88c-44e4-b9fe-345aa0acd3a4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.472425] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Waiting for the task: (returnval){ [ 665.472425] env[63538]: value = "task-5100580" [ 665.472425] env[63538]: _type = "Task" [ 665.472425] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.484436] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100580, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.495177] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Updating instance_info_cache with network_info: [{"id": "42827ce2-838a-4a4a-b29a-40f1d29e7a3e", "address": "fa:16:3e:3f:84:1a", "network": {"id": "1ffc7eb0-0516-45ff-a0e3-3f2887487b78", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550035383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452b39ccca6b4fcba39b1e61f0508f14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42827ce2-83", "ovs_interfaceid": "42827ce2-838a-4a4a-b29a-40f1d29e7a3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.568424] env[63538]: DEBUG nova.compute.manager [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Received event network-vif-deleted-4c120970-8c28-4096-892b-31c6a3b13bbe {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 665.568660] env[63538]: DEBUG nova.compute.manager [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Received event network-vif-plugged-42827ce2-838a-4a4a-b29a-40f1d29e7a3e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 665.568861] env[63538]: DEBUG oslo_concurrency.lockutils [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] Acquiring lock "5421e135-9581-4f81-aa8a-2a604887a1df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.569091] env[63538]: DEBUG oslo_concurrency.lockutils [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] Lock "5421e135-9581-4f81-aa8a-2a604887a1df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.569366] env[63538]: DEBUG oslo_concurrency.lockutils [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] Lock "5421e135-9581-4f81-aa8a-2a604887a1df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.569546] env[63538]: DEBUG nova.compute.manager [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] No waiting events found dispatching network-vif-plugged-42827ce2-838a-4a4a-b29a-40f1d29e7a3e {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 665.569717] env[63538]: WARNING nova.compute.manager [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Received unexpected event network-vif-plugged-42827ce2-838a-4a4a-b29a-40f1d29e7a3e for instance with vm_state building and task_state spawning. [ 665.570348] env[63538]: DEBUG nova.compute.manager [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Received event network-changed-42827ce2-838a-4a4a-b29a-40f1d29e7a3e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 665.570467] env[63538]: DEBUG nova.compute.manager [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Refreshing instance network info cache due to event network-changed-42827ce2-838a-4a4a-b29a-40f1d29e7a3e. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 665.571174] env[63538]: DEBUG oslo_concurrency.lockutils [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] Acquiring lock "refresh_cache-5421e135-9581-4f81-aa8a-2a604887a1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.607976] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54689b37-3aad-48bc-a3dc-f321810a30be {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.616916] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76194535-4cfc-4d63-91c0-e3e2a42cb153 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.651357] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "e32789d5-59ba-4657-9a9c-84fc9bd6cfdf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.651566] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "e32789d5-59ba-4657-9a9c-84fc9bd6cfdf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.651772] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 
tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "e32789d5-59ba-4657-9a9c-84fc9bd6cfdf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.652115] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "e32789d5-59ba-4657-9a9c-84fc9bd6cfdf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.652219] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "e32789d5-59ba-4657-9a9c-84fc9bd6cfdf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.654799] env[63538]: INFO nova.compute.manager [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Terminating instance [ 665.657750] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820f1c7b-c614-421b-8b06-902b7f047f6f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.660729] env[63538]: DEBUG nova.compute.manager [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 665.660914] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 665.662019] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166c22ba-196d-40fb-9703-36103f102eec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.676761] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b425ef61-27f5-435f-9d9f-7c29700afc3e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.684033] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 665.684033] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d949dc8e-af8b-4bb1-b188-dc6d4c16073c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.697549] env[63538]: DEBUG nova.compute.provider_tree [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.718202] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100579, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.747935] env[63538]: DEBUG nova.network.neutron [-] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.757958] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 665.759919] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 665.760591] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 665.760591] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Deleting the datastore file [datastore1] e32789d5-59ba-4657-9a9c-84fc9bd6cfdf {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 665.760873] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e44b6b0e-3539-4292-8300-b92a14411dfb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.770051] env[63538]: DEBUG oslo_vmware.api [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 665.770051] env[63538]: value = "task-5100582" [ 665.770051] env[63538]: _type = "Task" [ 665.770051] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.785717] env[63538]: DEBUG oslo_vmware.api [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100582, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.788535] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 665.789057] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 665.789057] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 665.789265] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 665.789441] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 665.789602] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 665.791043] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 665.791272] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 665.791501] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 665.792881] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 665.792881] env[63538]: DEBUG nova.virt.hardware [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 665.792881] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1ac207-7592-4007-950e-3543706fbb9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.799704] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523caccd-c79c-1d51-145b-74af47266c54, 'name': SearchDatastore_Task, 'duration_secs': 0.018177} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.801063] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-015196d0-01a7-409a-b4c1-4b7278dc4110 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.807509] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8f9baf-e2c2-4a8c-8195-b76b8163cb08 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.814368] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 665.814368] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52104f66-ea04-37f5-7a65-d296cab50b05" [ 665.814368] env[63538]: _type = "Task" [ 665.814368] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.831887] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52104f66-ea04-37f5-7a65-d296cab50b05, 'name': SearchDatastore_Task, 'duration_secs': 0.020026} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.832233] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.832590] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] ee9fe572-7a17-46db-8330-4b6f632c6b2c/ee9fe572-7a17-46db-8330-4b6f632c6b2c.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 665.832776] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83226fc0-1c0c-439c-9fd8-676efbee8c1f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.843555] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 665.843555] env[63538]: value = "task-5100583" [ 665.843555] env[63538]: _type = "Task" [ 665.843555] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.852649] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100583, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.985907] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100580, 'name': Rename_Task, 'duration_secs': 0.144069} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.986231] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 665.987027] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8774fa9-a109-4bd1-acbb-28b064949f76 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.994254] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Waiting for the task: (returnval){ [ 665.994254] env[63538]: value = "task-5100584" [ 665.994254] env[63538]: _type = "Task" [ 665.994254] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.997810] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Releasing lock "refresh_cache-5421e135-9581-4f81-aa8a-2a604887a1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.998140] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Instance network_info: |[{"id": "42827ce2-838a-4a4a-b29a-40f1d29e7a3e", "address": "fa:16:3e:3f:84:1a", "network": {"id": "1ffc7eb0-0516-45ff-a0e3-3f2887487b78", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550035383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452b39ccca6b4fcba39b1e61f0508f14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42827ce2-83", "ovs_interfaceid": "42827ce2-838a-4a4a-b29a-40f1d29e7a3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 665.998418] env[63538]: DEBUG oslo_concurrency.lockutils [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] Acquired lock "refresh_cache-5421e135-9581-4f81-aa8a-2a604887a1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.998596] env[63538]: DEBUG nova.network.neutron 
[req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Refreshing network info cache for port 42827ce2-838a-4a4a-b29a-40f1d29e7a3e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 666.000674] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:84:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f256cfee-512d-4192-9aca-6750fdb1cd4c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '42827ce2-838a-4a4a-b29a-40f1d29e7a3e', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 666.007586] env[63538]: DEBUG oslo.service.loopingcall [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 666.008713] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 666.008957] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb06e246-b3d7-4981-a24e-fd2e8b20a4aa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.028863] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100584, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.034733] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 666.034733] env[63538]: value = "task-5100585" [ 666.034733] env[63538]: _type = "Task" [ 666.034733] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.044753] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100585, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.199580] env[63538]: DEBUG nova.scheduler.client.report [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 666.223465] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100579, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.252249] env[63538]: INFO nova.compute.manager [-] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Took 2.11 seconds to deallocate network for instance. [ 666.280493] env[63538]: DEBUG oslo_vmware.api [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100582, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169859} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.280798] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 666.281749] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 666.281749] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 666.281749] env[63538]: INFO nova.compute.manager [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Took 0.62 seconds to destroy the instance on the hypervisor. 
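The entries above (Rename_Task, SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) all follow the same shape: the vCenter call returns a task handle immediately, and the driver then re-reads the task's progress until it reports success, logging "Waiting for the task" and "progress is N%" along the way. The sketch below is illustrative only, not the oslo.vmware implementation; the names poll_task and TaskInfo are hypothetical, and a real fetch_info() would be a PropertyCollector read against vCenter rather than an in-process call.

# Hedged sketch of the poll-until-complete pattern visible in the log.
# Not oslo.vmware code; stdlib only. poll_task/TaskInfo are made-up names.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    task_id: str   # e.g. "task-5100583"
    name: str      # e.g. "CopyVirtualDisk_Task"
    state: str     # "running" | "success" | "error"
    progress: int  # 0-100, as logged by _poll_task

def poll_task(fetch_info, interval=0.5, timeout=300.0):
    """Block until the task completes, printing progress like the log lines do."""
    deadline = time.monotonic() + timeout
    info = fetch_info()
    while time.monotonic() < deadline:
        print(f"Task: {{'id': {info.task_id!r}, 'name': {info.name!r}}} "
              f"progress is {info.progress}%.")
        if info.state == "success":
            return info                      # caller logs "completed successfully"
        if info.state == "error":
            raise RuntimeError(f"{info.name} ({info.task_id}) failed")
        time.sleep(interval)                 # back off before re-reading the task
        info = fetch_info()
    raise TimeoutError(f"task {info.task_id} did not complete within {timeout}s")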
[ 666.281749] env[63538]: DEBUG oslo.service.loopingcall [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 666.281954] env[63538]: DEBUG nova.compute.manager [-] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 666.281954] env[63538]: DEBUG nova.network.neutron [-] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 666.357094] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100583, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.514151] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100584, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.547968] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100585, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.629847] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Acquiring lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.633032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.633032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Acquiring lock "bf54098e-91a8-403f-a6fe-b58a62daaadb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.633032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.633032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.633577] env[63538]: INFO nova.compute.manager [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Terminating instance [ 666.635080] env[63538]: DEBUG nova.compute.manager [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 666.635220] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 666.636960] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f69de37-621e-4910-815b-19eb04c2c39e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.648520] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 666.648745] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1b0c775-c26e-4b32-8dd5-85c801eda957 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.664287] env[63538]: DEBUG oslo_vmware.api [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Waiting for the task: (returnval){ [ 666.664287] env[63538]: value = "task-5100586" [ 666.664287] env[63538]: _type = "Task" [ 666.664287] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.676528] env[63538]: DEBUG nova.compute.manager [req-0572cef3-d456-4b2e-92a9-8ef89c1712f9 req-424bd9dc-f292-42f8-8e1d-adab745da069 service nova] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Received event network-vif-deleted-204795e5-710d-4501-95e6-1353e467aded {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 666.676757] env[63538]: INFO nova.compute.manager [req-0572cef3-d456-4b2e-92a9-8ef89c1712f9 req-424bd9dc-f292-42f8-8e1d-adab745da069 service nova] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Neutron deleted interface 204795e5-710d-4501-95e6-1353e467aded; detaching it from the instance and deleting it from the info cache [ 666.676938] env[63538]: DEBUG nova.network.neutron [req-0572cef3-d456-4b2e-92a9-8ef89c1712f9 req-424bd9dc-f292-42f8-8e1d-adab745da069 service nova] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.684281] env[63538]: DEBUG oslo_vmware.api [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100586, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.708915] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.987s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.709609] env[63538]: DEBUG nova.compute.manager [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 666.715529] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.661s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.717288] env[63538]: INFO nova.compute.claims [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 666.742023] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100579, 'name': Rename_Task, 'duration_secs': 1.178726} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.745086] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 666.745907] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33283f5a-1754-4062-83e7-06ada98a6a8a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.756216] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 666.756216] env[63538]: value = "task-5100587" [ 666.756216] env[63538]: _type = "Task" [ 666.756216] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.761798] env[63538]: DEBUG oslo_concurrency.lockutils [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.770497] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100587, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.855266] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100583, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536105} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.855556] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] ee9fe572-7a17-46db-8330-4b6f632c6b2c/ee9fe572-7a17-46db-8330-4b6f632c6b2c.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 666.855767] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 666.856036] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad72c1f8-e0f3-4376-a76a-c1a8fe6926b4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.863230] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 666.863230] env[63538]: value = "task-5100588" [ 666.863230] env[63538]: _type = "Task" [ 666.863230] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.875150] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100588, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.007329] env[63538]: DEBUG oslo_vmware.api [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100584, 'name': PowerOnVM_Task, 'duration_secs': 0.772572} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.007659] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 667.007768] env[63538]: INFO nova.compute.manager [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Took 11.14 seconds to spawn the instance on the hypervisor. 
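Alongside the task polling, the log repeatedly traces critical sections with "Acquiring lock ... by ...", "acquired ... waited Ns", and "released ... held Ns" (instance event locks, "compute_resources", the image-cache datastore path). A minimal sketch of that trace pattern, under the assumption of per-name in-process locks, is below; it is not oslo_concurrency.lockutils itself, and named_lock is a hypothetical helper.

# Hedged sketch of the named-lock acquire/release trace seen in the log.
# Stdlib only; not the lockutils implementation.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one lock per name, e.g. "compute_resources"

@contextmanager
def named_lock(name, caller):
    lock = _locks[name]
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {t1 - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

# Example usage mirroring the resource-claim entries in the log.
with named_lock("compute_resources", "ResourceTracker.instance_claim"):
    pass  # claim resources for the instance here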
[ 667.007941] env[63538]: DEBUG nova.compute.manager [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 667.008781] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d67ff3b-b2a3-4165-aafd-fa486781bde9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.012186] env[63538]: DEBUG nova.network.neutron [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Updated VIF entry in instance network info cache for port 42827ce2-838a-4a4a-b29a-40f1d29e7a3e. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 667.012520] env[63538]: DEBUG nova.network.neutron [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Updating instance_info_cache with network_info: [{"id": "42827ce2-838a-4a4a-b29a-40f1d29e7a3e", "address": "fa:16:3e:3f:84:1a", "network": {"id": "1ffc7eb0-0516-45ff-a0e3-3f2887487b78", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550035383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452b39ccca6b4fcba39b1e61f0508f14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42827ce2-83", "ovs_interfaceid": "42827ce2-838a-4a4a-b29a-40f1d29e7a3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.052183] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100585, 'name': CreateVM_Task, 'duration_secs': 0.586649} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.052183] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 667.052800] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.052993] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.053346] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 667.053605] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edbad2bf-e839-4ec6-8133-e9da64eb3de6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.059525] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 667.059525] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c9eb12-e25b-26d6-2e63-98ae0ca485e9" [ 667.059525] env[63538]: _type = "Task" [ 667.059525] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.069204] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c9eb12-e25b-26d6-2e63-98ae0ca485e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.107516] env[63538]: DEBUG nova.network.neutron [-] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.176028] env[63538]: DEBUG oslo_vmware.api [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100586, 'name': PowerOffVM_Task, 'duration_secs': 0.200318} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.176320] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 667.176493] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 667.176749] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce50ef90-ef77-48e2-b768-f2d347f50319 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.180173] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2de279ae-340d-4425-baeb-3fe6ac7e2d66 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.190229] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e777b2-7fa3-4aa3-852c-16df06969b25 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.234673] env[63538]: DEBUG nova.compute.utils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 667.236868] env[63538]: DEBUG nova.compute.manager [req-0572cef3-d456-4b2e-92a9-8ef89c1712f9 req-424bd9dc-f292-42f8-8e1d-adab745da069 service nova] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Detach interface failed, port_id=204795e5-710d-4501-95e6-1353e467aded, reason: Instance e32789d5-59ba-4657-9a9c-84fc9bd6cfdf could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 667.238567] env[63538]: DEBUG nova.compute.manager [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 667.238567] env[63538]: DEBUG nova.network.neutron [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 667.269331] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100587, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.270858] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 667.271088] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 667.271277] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Deleting the datastore file [datastore1] bf54098e-91a8-403f-a6fe-b58a62daaadb {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 667.271544] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-351cb967-be6c-41c7-8596-c3faf8d80ff2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.284047] env[63538]: DEBUG oslo_vmware.api [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Waiting for the task: (returnval){ [ 667.284047] env[63538]: value = "task-5100590" [ 667.284047] env[63538]: _type = "Task" [ 667.284047] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.293568] env[63538]: DEBUG oslo_vmware.api [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100590, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.373607] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100588, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.145088} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.373924] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 667.374772] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22899f0a-343a-4f99-a6aa-dfb21cf14258 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.403629] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] ee9fe572-7a17-46db-8330-4b6f632c6b2c/ee9fe572-7a17-46db-8330-4b6f632c6b2c.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 667.405478] env[63538]: DEBUG nova.policy [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85f992ab95cd4330bb767b3dd04a0e25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94d1cf2838014527bb9c399ae0cff7ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 667.407214] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63ce36f5-5c66-457e-879f-9de7d374944d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.434510] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 667.434510] env[63538]: value = "task-5100591" [ 667.434510] env[63538]: _type = "Task" [ 667.434510] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.445086] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100591, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.500124] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Successfully updated port: 0be458b1-bd73-4d0f-8fd8-bcfec3c520c7 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 667.515999] env[63538]: DEBUG oslo_concurrency.lockutils [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] Releasing lock "refresh_cache-5421e135-9581-4f81-aa8a-2a604887a1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.516198] env[63538]: DEBUG nova.compute.manager [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Received event network-vif-deleted-16b1a07f-5af3-4a11-967b-acc2df708c1d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 667.516351] env[63538]: INFO nova.compute.manager [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Neutron deleted interface 16b1a07f-5af3-4a11-967b-acc2df708c1d; detaching it from the instance and deleting it from the info cache [ 667.516523] env[63538]: DEBUG nova.network.neutron [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.535045] env[63538]: INFO nova.compute.manager [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Took 45.80 seconds to build instance. [ 667.572427] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c9eb12-e25b-26d6-2e63-98ae0ca485e9, 'name': SearchDatastore_Task, 'duration_secs': 0.030387} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.573242] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.573499] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 667.573817] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.573980] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.574175] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 667.574445] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-175a8be2-66e2-41b3-97d1-2540bc0a5eea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.587767] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 667.587767] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 667.588568] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86016c65-93fe-446c-8c9c-b076e73d481b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.596351] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 667.596351] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52413da4-0a95-7c1d-c9e1-183922317d29" [ 667.596351] env[63538]: _type = "Task" [ 667.596351] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.611719] env[63538]: INFO nova.compute.manager [-] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Took 1.33 seconds to deallocate network for instance. [ 667.612220] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52413da4-0a95-7c1d-c9e1-183922317d29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.738946] env[63538]: DEBUG nova.compute.manager [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 667.753763] env[63538]: DEBUG nova.compute.manager [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Received event network-vif-plugged-0be458b1-bd73-4d0f-8fd8-bcfec3c520c7 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 667.753763] env[63538]: DEBUG oslo_concurrency.lockutils [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] Acquiring lock "65fc18ff-8901-40d2-8a5b-640eb9768240-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.753763] env[63538]: DEBUG oslo_concurrency.lockutils [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] Lock "65fc18ff-8901-40d2-8a5b-640eb9768240-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.753763] env[63538]: DEBUG oslo_concurrency.lockutils [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] Lock "65fc18ff-8901-40d2-8a5b-640eb9768240-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.753763] env[63538]: DEBUG nova.compute.manager [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] No waiting events found dispatching network-vif-plugged-0be458b1-bd73-4d0f-8fd8-bcfec3c520c7 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 667.753966] env[63538]: WARNING nova.compute.manager [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Received unexpected event network-vif-plugged-0be458b1-bd73-4d0f-8fd8-bcfec3c520c7 for instance with vm_state building and task_state spawning. [ 667.754149] env[63538]: DEBUG nova.compute.manager [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Received event network-changed-0be458b1-bd73-4d0f-8fd8-bcfec3c520c7 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 667.754337] env[63538]: DEBUG nova.compute.manager [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Refreshing instance network info cache due to event network-changed-0be458b1-bd73-4d0f-8fd8-bcfec3c520c7. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 667.754555] env[63538]: DEBUG oslo_concurrency.lockutils [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] Acquiring lock "refresh_cache-65fc18ff-8901-40d2-8a5b-640eb9768240" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.754796] env[63538]: DEBUG oslo_concurrency.lockutils [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] Acquired lock "refresh_cache-65fc18ff-8901-40d2-8a5b-640eb9768240" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.754877] env[63538]: DEBUG nova.network.neutron [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Refreshing network info cache for port 0be458b1-bd73-4d0f-8fd8-bcfec3c520c7 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 667.773977] env[63538]: DEBUG oslo_vmware.api [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100587, 'name': PowerOnVM_Task, 'duration_secs': 0.639623} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.775032] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 667.775032] env[63538]: INFO nova.compute.manager [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Took 10.83 seconds to spawn the instance on the hypervisor. [ 667.775032] env[63538]: DEBUG nova.compute.manager [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 667.776067] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d165ae-6120-462e-a9eb-4a1dd878ccc1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.803266] env[63538]: DEBUG oslo_vmware.api [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100590, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.944048] env[63538]: DEBUG nova.network.neutron [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Successfully created port: 27c8d553-e481-41cf-9f67-20912d2adb46 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 667.952591] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100591, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.002955] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "refresh_cache-65fc18ff-8901-40d2-8a5b-640eb9768240" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.019960] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-77b44064-a283-4b74-94c3-2907ff3ef5c6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.032870] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e868cc4-86fc-4845-b3e3-4a13684fcb82 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.049955] env[63538]: DEBUG oslo_concurrency.lockutils [None req-26260899-8b9e-476f-afc8-2f4bd6f642ec tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Lock "47500aaa-92fc-454c-badd-d6f8a2203083" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.326s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.076985] env[63538]: DEBUG nova.compute.manager [req-1a6934fe-5756-4c3b-8b37-5ac34c9f180d req-0c8c98bf-a907-4bbb-93a6-3feeed15be40 service nova] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Detach interface failed, port_id=16b1a07f-5af3-4a11-967b-acc2df708c1d, reason: Instance 543875b5-195a-476d-a0b4-3211ceefa27f could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 668.108950] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52413da4-0a95-7c1d-c9e1-183922317d29, 'name': SearchDatastore_Task, 'duration_secs': 0.038739} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.109903] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab907af2-537f-446c-9d9f-30cfdf7c879f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.121946] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.122398] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 668.122398] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52076801-7d46-cfa0-a954-1177f32680f0" [ 668.122398] env[63538]: _type = "Task" [ 668.122398] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.136841] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52076801-7d46-cfa0-a954-1177f32680f0, 'name': SearchDatastore_Task, 'duration_secs': 0.010959} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.137181] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.137529] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 5421e135-9581-4f81-aa8a-2a604887a1df/5421e135-9581-4f81-aa8a-2a604887a1df.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 668.137849] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c229d60-ac6f-4f9d-ac4f-8437f589a8cf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.146343] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 668.146343] env[63538]: value = "task-5100592" [ 668.146343] env[63538]: _type = "Task" [ 668.146343] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.161402] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100592, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.305501] env[63538]: DEBUG oslo_vmware.api [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Task: {'id': task-5100590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.552272} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.315260] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 668.315260] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 668.315260] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 668.315260] env[63538]: INFO nova.compute.manager [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Took 1.68 seconds to destroy the instance on the hypervisor. [ 668.315260] env[63538]: DEBUG oslo.service.loopingcall [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 668.319199] env[63538]: DEBUG nova.compute.manager [-] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 668.319199] env[63538]: DEBUG nova.network.neutron [-] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 668.321193] env[63538]: INFO nova.compute.manager [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Took 42.48 seconds to build instance. 
[ 668.348800] env[63538]: DEBUG nova.network.neutron [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 668.444911] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100591, 'name': ReconfigVM_Task, 'duration_secs': 0.79181} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.448032] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Reconfigured VM instance instance-0000001b to attach disk [datastore2] ee9fe572-7a17-46db-8330-4b6f632c6b2c/ee9fe572-7a17-46db-8330-4b6f632c6b2c.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 668.448842] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-80a74f18-361c-4a8d-af5b-79fee90b7caf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.460226] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 668.460226] env[63538]: value = "task-5100593" [ 668.460226] env[63538]: _type = "Task" [ 668.460226] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.474368] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100593, 'name': Rename_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.512022] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe41495-bae1-4b9d-aca2-953e56958c30 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.519337] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c2308d-2e2c-4dbb-9ef2-f6c61bc23a86 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.560074] env[63538]: DEBUG nova.compute.manager [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 668.563685] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d986908-6e84-471b-95ae-a879f0f5d6e9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.572831] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694e02de-96cd-4045-b4f1-7861d6775d8d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.589208] env[63538]: DEBUG nova.compute.provider_tree [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 668.640251] env[63538]: DEBUG nova.network.neutron [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.658036] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100592, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.754056] env[63538]: DEBUG nova.compute.manager [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 668.791853] env[63538]: DEBUG nova.virt.hardware [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 668.792161] env[63538]: DEBUG nova.virt.hardware [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 668.792331] env[63538]: DEBUG nova.virt.hardware [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 668.792515] env[63538]: DEBUG nova.virt.hardware [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 668.792685] env[63538]: DEBUG nova.virt.hardware [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 668.792960] env[63538]: DEBUG nova.virt.hardware [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 668.793211] env[63538]: DEBUG nova.virt.hardware [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 668.793375] env[63538]: DEBUG nova.virt.hardware [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 668.793541] env[63538]: DEBUG nova.virt.hardware [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 668.793741] env[63538]: DEBUG nova.virt.hardware [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 668.793984] env[63538]: DEBUG nova.virt.hardware [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 668.795832] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408df586-c8bb-4f76-9dfc-cf86cb27e849 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.808413] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ad49e1-8c11-4d2f-9a9b-1ea39eb653ab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.827609] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fddd8804-beb0-4a7d-bca6-1685af996f68 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "b5593b74-fe89-43f5-a8c6-e73159b4efac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.912s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.973323] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100593, 'name': Rename_Task, 'duration_secs': 0.179167} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.973741] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 668.974111] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2344c811-dfcb-4f6b-b402-4a6c3b782e75 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.982345] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 668.982345] env[63538]: value = "task-5100594" [ 668.982345] env[63538]: _type = "Task" [ 668.982345] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.991699] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100594, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.091800] env[63538]: DEBUG nova.scheduler.client.report [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 669.098269] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.143525] env[63538]: DEBUG oslo_concurrency.lockutils [req-3452fe7c-616f-4673-bbaa-f3369e8de66b req-3d15499c-ca39-40e9-b927-dc5b684ff577 service nova] Releasing lock "refresh_cache-65fc18ff-8901-40d2-8a5b-640eb9768240" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.143942] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquired lock "refresh_cache-65fc18ff-8901-40d2-8a5b-640eb9768240" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.144120] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df 
tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 669.161407] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100592, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.817394} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.161710] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 5421e135-9581-4f81-aa8a-2a604887a1df/5421e135-9581-4f81-aa8a-2a604887a1df.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 669.161920] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 669.162959] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-751a3062-cd8d-43c3-b5d6-a27f0af2b46d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.174976] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 669.174976] env[63538]: value = "task-5100595" [ 669.174976] env[63538]: _type = "Task" [ 669.174976] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.186942] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100595, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.271845] env[63538]: DEBUG nova.compute.manager [req-7273b45b-a0b0-40d7-9cea-7fddd87e3c38 req-4cf2a0f2-4e4a-413b-9868-fbcea7a1cf0b service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Received event network-vif-deleted-33b2bb84-c893-4ee8-90de-6696ef21d830 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 669.272068] env[63538]: INFO nova.compute.manager [req-7273b45b-a0b0-40d7-9cea-7fddd87e3c38 req-4cf2a0f2-4e4a-413b-9868-fbcea7a1cf0b service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Neutron deleted interface 33b2bb84-c893-4ee8-90de-6696ef21d830; detaching it from the instance and deleting it from the info cache [ 669.272286] env[63538]: DEBUG nova.network.neutron [req-7273b45b-a0b0-40d7-9cea-7fddd87e3c38 req-4cf2a0f2-4e4a-413b-9868-fbcea7a1cf0b service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.330888] env[63538]: DEBUG nova.compute.manager [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 669.495658] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100594, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.562677] env[63538]: DEBUG nova.network.neutron [-] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.602218] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.887s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.602691] env[63538]: DEBUG nova.compute.manager [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 669.606518] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 19.820s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.688262] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100595, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071532} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.688548] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 669.689395] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0071e99-5742-4b2a-b722-1fcdccd7e5a1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.720047] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] 5421e135-9581-4f81-aa8a-2a604887a1df/5421e135-9581-4f81-aa8a-2a604887a1df.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 669.723021] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 669.723558] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ff2f348-9ab2-493c-9e28-cec260da565b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.745206] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 669.745206] env[63538]: value = "task-5100596" [ 669.745206] env[63538]: _type = "Task" [ 669.745206] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.755352] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100596, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.776927] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b539bd5-d8e1-4b6e-bdc8-8135c2180395 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.791298] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcb9b0b-33b5-482b-aaee-e5ea5d6a36c5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.806843] env[63538]: DEBUG nova.compute.manager [req-9f796e53-8acc-4c8b-a71e-18cf7fa0c847 req-0bbe6043-f4c1-4c74-b8d0-baa9b73f3901 service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Received event network-changed-845bbf0e-88f5-474e-b875-0a12bfaebd27 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 669.807662] env[63538]: DEBUG nova.compute.manager [req-9f796e53-8acc-4c8b-a71e-18cf7fa0c847 req-0bbe6043-f4c1-4c74-b8d0-baa9b73f3901 service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Refreshing instance network info cache due to event network-changed-845bbf0e-88f5-474e-b875-0a12bfaebd27. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 669.807662] env[63538]: DEBUG oslo_concurrency.lockutils [req-9f796e53-8acc-4c8b-a71e-18cf7fa0c847 req-0bbe6043-f4c1-4c74-b8d0-baa9b73f3901 service nova] Acquiring lock "refresh_cache-47500aaa-92fc-454c-badd-d6f8a2203083" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 669.807662] env[63538]: DEBUG oslo_concurrency.lockutils [req-9f796e53-8acc-4c8b-a71e-18cf7fa0c847 req-0bbe6043-f4c1-4c74-b8d0-baa9b73f3901 service nova] Acquired lock "refresh_cache-47500aaa-92fc-454c-badd-d6f8a2203083" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.807662] env[63538]: DEBUG nova.network.neutron [req-9f796e53-8acc-4c8b-a71e-18cf7fa0c847 req-0bbe6043-f4c1-4c74-b8d0-baa9b73f3901 service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Refreshing network info cache for port 845bbf0e-88f5-474e-b875-0a12bfaebd27 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 669.846397] env[63538]: DEBUG nova.compute.manager [req-7273b45b-a0b0-40d7-9cea-7fddd87e3c38 req-4cf2a0f2-4e4a-413b-9868-fbcea7a1cf0b service nova] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Detach interface failed, port_id=33b2bb84-c893-4ee8-90de-6696ef21d830, reason: Instance bf54098e-91a8-403f-a6fe-b58a62daaadb could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 669.872133] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.958825] env[63538]: DEBUG nova.network.neutron [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Updating instance_info_cache with network_info: [{"id": "0be458b1-bd73-4d0f-8fd8-bcfec3c520c7", "address": "fa:16:3e:9b:3f:d5", "network": {"id": "1ffc7eb0-0516-45ff-a0e3-3f2887487b78", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550035383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452b39ccca6b4fcba39b1e61f0508f14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0be458b1-bd", "ovs_interfaceid": "0be458b1-bd73-4d0f-8fd8-bcfec3c520c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.995728] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100594, 'name': PowerOnVM_Task, 'duration_secs': 0.868389} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.996025] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 669.996264] env[63538]: INFO nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Took 10.04 seconds to spawn the instance on the hypervisor. 
[ 669.996438] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 669.997269] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2826c5-dc53-4900-b8bd-838ded96260e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.064419] env[63538]: INFO nova.compute.manager [-] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Took 1.75 seconds to deallocate network for instance. [ 670.110019] env[63538]: DEBUG nova.compute.utils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 670.110462] env[63538]: DEBUG nova.compute.manager [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 670.110857] env[63538]: DEBUG nova.network.neutron [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 670.132864] env[63538]: DEBUG nova.network.neutron [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Successfully updated port: 27c8d553-e481-41cf-9f67-20912d2adb46 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 670.164274] env[63538]: DEBUG nova.policy [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78d5688727174c08a29ea7f3ac35e129', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdfc96ac41be43f9ba0596444eb75737', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 670.263956] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100596, 'name': ReconfigVM_Task, 'duration_secs': 0.490182} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.264713] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Reconfigured VM instance instance-0000001c to attach disk [datastore2] 5421e135-9581-4f81-aa8a-2a604887a1df/5421e135-9581-4f81-aa8a-2a604887a1df.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 670.265579] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16c5d4ec-400b-47f7-a7d7-578a12ef1b2c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.275422] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 670.275422] env[63538]: value = "task-5100597" [ 670.275422] env[63538]: _type = "Task" [ 670.275422] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.285081] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100597, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.462351] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Releasing lock "refresh_cache-65fc18ff-8901-40d2-8a5b-640eb9768240" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.462807] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Instance network_info: |[{"id": "0be458b1-bd73-4d0f-8fd8-bcfec3c520c7", "address": "fa:16:3e:9b:3f:d5", "network": {"id": "1ffc7eb0-0516-45ff-a0e3-3f2887487b78", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550035383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "452b39ccca6b4fcba39b1e61f0508f14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0be458b1-bd", "ovs_interfaceid": "0be458b1-bd73-4d0f-8fd8-bcfec3c520c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 670.463165] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:3f:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f256cfee-512d-4192-9aca-6750fdb1cd4c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0be458b1-bd73-4d0f-8fd8-bcfec3c520c7', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 670.471885] env[63538]: DEBUG oslo.service.loopingcall [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 670.474867] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 670.475324] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fad3cd9b-f83c-4ef4-9c70-13f09938ce40 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.502033] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 670.502033] env[63538]: value = "task-5100598" [ 670.502033] env[63538]: _type = "Task" [ 670.502033] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.510640] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100598, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.523364] env[63538]: INFO nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Took 42.86 seconds to build instance. 
[ 670.574426] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.584244] env[63538]: DEBUG nova.network.neutron [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Successfully created port: e7647c9c-cea3-4b43-ba7e-69aaac6286a8 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 670.618562] env[63538]: DEBUG nova.compute.manager [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 670.637379] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquiring lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.637379] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquired lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.637379] env[63538]: DEBUG nova.network.neutron [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 670.673646] env[63538]: DEBUG nova.network.neutron [req-9f796e53-8acc-4c8b-a71e-18cf7fa0c847 req-0bbe6043-f4c1-4c74-b8d0-baa9b73f3901 service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Updated VIF entry in instance network info cache for port 845bbf0e-88f5-474e-b875-0a12bfaebd27. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 670.675446] env[63538]: DEBUG nova.network.neutron [req-9f796e53-8acc-4c8b-a71e-18cf7fa0c847 req-0bbe6043-f4c1-4c74-b8d0-baa9b73f3901 service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Updating instance_info_cache with network_info: [{"id": "845bbf0e-88f5-474e-b875-0a12bfaebd27", "address": "fa:16:3e:fe:7c:1e", "network": {"id": "3fa1aaf4-0160-454f-aa6e-0a33051c92b8", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-221507218-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ae58d691f8e4b10a978587c2b8863e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap845bbf0e-88", "ovs_interfaceid": "845bbf0e-88f5-474e-b875-0a12bfaebd27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.790435] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100597, 'name': Rename_Task, 'duration_secs': 0.346904} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.790923] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 670.791308] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f51c1e3c-a999-4b5e-8ced-74d5900c2f7b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.801945] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 670.801945] env[63538]: value = "task-5100599" [ 670.801945] env[63538]: _type = "Task" [ 670.801945] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.813228] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100599, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.855326] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33f63af-374d-4602-8691-7a5f9faa4ab8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.862409] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e943d8f2-65a9-4dd9-9396-1bf024c5cd0f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.896084] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998d83bf-0b94-447d-85eb-2a92802a8dea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.904744] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843859a2-62af-48e5-9cb3-235d7e8177f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.919378] env[63538]: DEBUG nova.compute.provider_tree [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.011830] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100598, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.025576] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "ee9fe572-7a17-46db-8330-4b6f632c6b2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.299s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.173016] env[63538]: DEBUG nova.network.neutron [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 671.178528] env[63538]: DEBUG oslo_concurrency.lockutils [req-9f796e53-8acc-4c8b-a71e-18cf7fa0c847 req-0bbe6043-f4c1-4c74-b8d0-baa9b73f3901 service nova] Releasing lock "refresh_cache-47500aaa-92fc-454c-badd-d6f8a2203083" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.315989] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100599, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.350647] env[63538]: DEBUG nova.network.neutron [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Updating instance_info_cache with network_info: [{"id": "27c8d553-e481-41cf-9f67-20912d2adb46", "address": "fa:16:3e:fb:de:70", "network": {"id": "3f23850b-a1b9-429c-b4c8-e41786ac3f89", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1688699578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94d1cf2838014527bb9c399ae0cff7ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c8d553-e4", "ovs_interfaceid": "27c8d553-e481-41cf-9f67-20912d2adb46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.422666] env[63538]: DEBUG nova.scheduler.client.report [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 671.514044] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100598, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.528721] env[63538]: DEBUG nova.compute.manager [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 671.629900] env[63538]: DEBUG nova.compute.manager [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 671.659901] env[63538]: DEBUG nova.virt.hardware [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 671.660670] env[63538]: DEBUG nova.virt.hardware [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 671.660670] env[63538]: DEBUG nova.virt.hardware [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 671.660863] env[63538]: DEBUG nova.virt.hardware [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 671.661056] env[63538]: DEBUG nova.virt.hardware [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 671.661223] env[63538]: DEBUG nova.virt.hardware [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 671.661441] env[63538]: DEBUG nova.virt.hardware [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 671.661601] env[63538]: DEBUG nova.virt.hardware [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 671.661767] 
env[63538]: DEBUG nova.virt.hardware [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 671.661948] env[63538]: DEBUG nova.virt.hardware [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 671.662137] env[63538]: DEBUG nova.virt.hardware [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 671.663120] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6688cb87-9586-441d-a8d1-29dac7d7d0aa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.672114] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcd9727-a6ee-45ab-ab2d-c1224575f6d3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.812856] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100599, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.853557] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Releasing lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.853937] env[63538]: DEBUG nova.compute.manager [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Instance network_info: |[{"id": "27c8d553-e481-41cf-9f67-20912d2adb46", "address": "fa:16:3e:fb:de:70", "network": {"id": "3f23850b-a1b9-429c-b4c8-e41786ac3f89", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1688699578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94d1cf2838014527bb9c399ae0cff7ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c8d553-e4", "ovs_interfaceid": "27c8d553-e481-41cf-9f67-20912d2adb46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 671.854374] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:de:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ef02af-c508-432f-ae29-3a219701d584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27c8d553-e481-41cf-9f67-20912d2adb46', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 671.863308] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Creating folder: Project (94d1cf2838014527bb9c399ae0cff7ce). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 671.863612] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aace6de3-af38-47ee-910b-2cc5cceb3572 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.877046] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Created folder: Project (94d1cf2838014527bb9c399ae0cff7ce) in parent group-v992234. [ 671.877227] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Creating folder: Instances. Parent ref: group-v992314. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 671.879788] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bda8d3c9-09c9-4389-bf5e-b95957aa6c77 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.881495] env[63538]: DEBUG nova.compute.manager [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Received event network-vif-plugged-27c8d553-e481-41cf-9f67-20912d2adb46 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 671.881697] env[63538]: DEBUG oslo_concurrency.lockutils [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] Acquiring lock "43729260-d138-4e62-9cc5-4db3ca39f5d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.881904] env[63538]: DEBUG oslo_concurrency.lockutils [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] Lock "43729260-d138-4e62-9cc5-4db3ca39f5d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.882084] env[63538]: DEBUG oslo_concurrency.lockutils [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] Lock "43729260-d138-4e62-9cc5-4db3ca39f5d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.882352] env[63538]: DEBUG nova.compute.manager [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] No waiting events found dispatching network-vif-plugged-27c8d553-e481-41cf-9f67-20912d2adb46 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 671.882574] env[63538]: WARNING nova.compute.manager [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Received unexpected event network-vif-plugged-27c8d553-e481-41cf-9f67-20912d2adb46 for instance with vm_state building and task_state spawning. 
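The two Folder.CreateFolder invocations above (the "Project (...)" and "Instances" folders) each map to a single synchronous vSphere call. A hedged sketch of that call through an already-established oslo.vmware session; parent_ref and the folder names are placeholders, not the references from this log:

    # Rough sketch of the Folder.CreateFolder calls recorded above, issued
    # through an existing oslo.vmware session. parent_ref and the names are
    # placeholders.
    def create_child_folder(session, parent_ref, name):
        # CreateFolder is synchronous and returns the new folder's moref.
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_ref, name=name)

    # project_folder = create_child_folder(session, parent_ref, 'Project (...)')
    # instances_folder = create_child_folder(session, project_folder, 'Instances')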
[ 671.882742] env[63538]: DEBUG nova.compute.manager [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Received event network-changed-27c8d553-e481-41cf-9f67-20912d2adb46 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 671.882915] env[63538]: DEBUG nova.compute.manager [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Refreshing instance network info cache due to event network-changed-27c8d553-e481-41cf-9f67-20912d2adb46. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 671.883115] env[63538]: DEBUG oslo_concurrency.lockutils [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] Acquiring lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.883263] env[63538]: DEBUG oslo_concurrency.lockutils [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] Acquired lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.883418] env[63538]: DEBUG nova.network.neutron [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Refreshing network info cache for port 27c8d553-e481-41cf-9f67-20912d2adb46 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 671.894591] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Created folder: Instances in parent group-v992314. [ 671.894848] env[63538]: DEBUG oslo.service.loopingcall [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 671.895023] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 671.895227] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2bbe0b2b-02a2-423c-83a7-da4144f68710 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.915184] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 671.915184] env[63538]: value = "task-5100602" [ 671.915184] env[63538]: _type = "Task" [ 671.915184] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.923719] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100602, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.021211] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100598, 'name': CreateVM_Task, 'duration_secs': 1.410534} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.021211] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 672.022166] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.022396] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.022807] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 672.023132] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41c53839-d23b-40c2-a5c1-99d9df7f4171 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.030314] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 672.030314] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5274ffab-ddce-41c8-d7f9-ed87111fd79f" [ 672.030314] env[63538]: _type = "Task" [ 672.030314] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.042547] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5274ffab-ddce-41c8-d7f9-ed87111fd79f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.053302] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 672.254620] env[63538]: DEBUG nova.compute.manager [req-ee9c1629-34f2-4e17-828f-4421a46565cf req-a15c43f5-ee74-41ed-8e3f-91eb5ec9e9f8 service nova] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Received event network-vif-plugged-e7647c9c-cea3-4b43-ba7e-69aaac6286a8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 672.254936] env[63538]: DEBUG oslo_concurrency.lockutils [req-ee9c1629-34f2-4e17-828f-4421a46565cf req-a15c43f5-ee74-41ed-8e3f-91eb5ec9e9f8 service nova] Acquiring lock "a7bb1869-5553-40d8-9c0b-366ccdef5fae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 672.255307] env[63538]: DEBUG oslo_concurrency.lockutils [req-ee9c1629-34f2-4e17-828f-4421a46565cf req-a15c43f5-ee74-41ed-8e3f-91eb5ec9e9f8 service nova] Lock "a7bb1869-5553-40d8-9c0b-366ccdef5fae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.255508] env[63538]: DEBUG oslo_concurrency.lockutils [req-ee9c1629-34f2-4e17-828f-4421a46565cf req-a15c43f5-ee74-41ed-8e3f-91eb5ec9e9f8 service nova] Lock "a7bb1869-5553-40d8-9c0b-366ccdef5fae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.255689] env[63538]: DEBUG nova.compute.manager [req-ee9c1629-34f2-4e17-828f-4421a46565cf req-a15c43f5-ee74-41ed-8e3f-91eb5ec9e9f8 service nova] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] No waiting events found dispatching network-vif-plugged-e7647c9c-cea3-4b43-ba7e-69aaac6286a8 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 672.255856] env[63538]: WARNING nova.compute.manager [req-ee9c1629-34f2-4e17-828f-4421a46565cf req-a15c43f5-ee74-41ed-8e3f-91eb5ec9e9f8 service nova] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Received unexpected event network-vif-plugged-e7647c9c-cea3-4b43-ba7e-69aaac6286a8 for instance with vm_state building and task_state spawning. [ 672.316561] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100599, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.368310] env[63538]: DEBUG nova.network.neutron [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Successfully updated port: e7647c9c-cea3-4b43-ba7e-69aaac6286a8 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 672.429501] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100602, 'name': CreateVM_Task, 'duration_secs': 0.305264} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.429690] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 672.430335] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.433726] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.827s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.436501] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.243s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.437892] env[63538]: INFO nova.compute.claims [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 672.542562] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5274ffab-ddce-41c8-d7f9-ed87111fd79f, 'name': SearchDatastore_Task, 'duration_secs': 0.01225} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.542926] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.543173] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 672.543409] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.543554] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.543768] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 672.544081] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.544677] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 672.544677] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9ecefcb-535f-4ca5-94da-f2a9fd0f44ee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.546451] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20b45f37-f36b-4503-bffc-715422008c2c {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.551528] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for the task: (returnval){ [ 672.551528] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5287b3b8-2fea-1871-b999-5e409f09d4c3" [ 672.551528] env[63538]: _type = "Task" [ 672.551528] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.556468] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 672.556739] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 672.560121] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b66ebaff-ca75-46a0-b571-e6e0521bec20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.562663] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5287b3b8-2fea-1871-b999-5e409f09d4c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.566080] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 672.566080] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5285aef2-3df9-2f13-062b-bd4c095af377" [ 672.566080] env[63538]: _type = "Task" [ 672.566080] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.576639] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5285aef2-3df9-2f13-062b-bd4c095af377, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.677214] env[63538]: DEBUG nova.network.neutron [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Updated VIF entry in instance network info cache for port 27c8d553-e481-41cf-9f67-20912d2adb46. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 672.677792] env[63538]: DEBUG nova.network.neutron [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Updating instance_info_cache with network_info: [{"id": "27c8d553-e481-41cf-9f67-20912d2adb46", "address": "fa:16:3e:fb:de:70", "network": {"id": "3f23850b-a1b9-429c-b4c8-e41786ac3f89", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1688699578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94d1cf2838014527bb9c399ae0cff7ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c8d553-e4", "ovs_interfaceid": "27c8d553-e481-41cf-9f67-20912d2adb46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.814655] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100599, 'name': PowerOnVM_Task, 'duration_secs': 1.555307} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.814942] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 672.815164] env[63538]: INFO nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Took 9.94 seconds to spawn the instance on the hypervisor. 
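The "Checking state" step that follows each power-on ("Invoking PropertyCollector.RetrievePropertiesEx") amounts to reading the VM's runtime.powerState property. A sketch of one plausible way to do that with oslo.vmware helpers, assuming an existing session and a placeholder vm_ref:

    # Hedged sketch of a power-state read via the PropertyCollector, matching
    # the RetrievePropertiesEx invocations above. vm_ref is a placeholder.
    from oslo_vmware import vim_util

    def get_power_state(session, vm_ref):
        # Returns e.g. 'poweredOn' / 'poweredOff' / 'suspended'.
        return session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, vm_ref, 'runtime.powerState')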
[ 672.815341] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 672.816169] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7c3296-b2fc-4a99-a729-0426fe4f27bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.871480] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "refresh_cache-a7bb1869-5553-40d8-9c0b-366ccdef5fae" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.871480] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquired lock "refresh_cache-a7bb1869-5553-40d8-9c0b-366ccdef5fae" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.871796] env[63538]: DEBUG nova.network.neutron [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 673.002058] env[63538]: INFO nova.scheduler.client.report [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Deleted allocation for migration 2e763caf-b452-499b-a6eb-169dbc8837b2 [ 673.063500] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5287b3b8-2fea-1871-b999-5e409f09d4c3, 'name': SearchDatastore_Task, 'duration_secs': 0.01105} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.063814] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.064079] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 673.064321] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.076768] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5285aef2-3df9-2f13-062b-bd4c095af377, 'name': SearchDatastore_Task, 'duration_secs': 0.009756} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.077578] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6a16ee8-8137-4da4-9191-a6bac1afd5e9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.083373] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 673.083373] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5226e508-5e7f-a133-147c-4067881e7e8b" [ 673.083373] env[63538]: _type = "Task" [ 673.083373] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.092838] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5226e508-5e7f-a133-147c-4067881e7e8b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.181979] env[63538]: DEBUG oslo_concurrency.lockutils [req-a32bce28-e4c3-4870-9e7f-3afcebe3b9ed req-3c8c2d23-b871-4b44-9ec6-9c1e7c6d924a service nova] Releasing lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.334604] env[63538]: INFO nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Took 43.81 seconds to build instance. [ 673.412149] env[63538]: DEBUG nova.network.neutron [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.508923] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fc9ab23c-7acd-4fce-8b28-1d03b5624126 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 28.664s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.594505] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5226e508-5e7f-a133-147c-4067881e7e8b, 'name': SearchDatastore_Task, 'duration_secs': 0.025505} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.597699] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.597980] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 65fc18ff-8901-40d2-8a5b-640eb9768240/65fc18ff-8901-40d2-8a5b-640eb9768240.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 673.598521] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.598714] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.598934] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7daf186-535f-4598-a955-36a870304409 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.600874] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0876b945-ce2a-4386-bd47-a4891a7b042c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.609572] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 673.609572] env[63538]: value = "task-5100603" [ 673.609572] env[63538]: _type = "Task" [ 673.609572] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.614149] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.614334] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 673.615429] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d9cd12a-6d7b-4882-a7f2-184328e23d2d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.624941] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100603, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.630108] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for the task: (returnval){ [ 673.630108] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52da2a2a-152d-509f-7ff9-86839060a209" [ 673.630108] env[63538]: _type = "Task" [ 673.630108] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.641991] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52da2a2a-152d-509f-7ff9-86839060a209, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.837719] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "5421e135-9581-4f81-aa8a-2a604887a1df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.037s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.848272] env[63538]: DEBUG nova.network.neutron [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Updating instance_info_cache with network_info: [{"id": "e7647c9c-cea3-4b43-ba7e-69aaac6286a8", "address": "fa:16:3e:9a:ba:2f", "network": {"id": "c4ac48b0-a0c2-4910-bcb9-3f42d415030f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2107782775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdfc96ac41be43f9ba0596444eb75737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7647c9c-ce", "ovs_interfaceid": 
"e7647c9c-cea3-4b43-ba7e-69aaac6286a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.085215] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0655520-4f07-4009-88c4-e5da6ca6fd0c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.094515] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f18222-7be8-4685-b935-8a5b8bef48fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.133573] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131489c9-eb04-4837-adb7-14fa28ad0337 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.145294] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100603, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.150680] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52da2a2a-152d-509f-7ff9-86839060a209, 'name': SearchDatastore_Task, 'duration_secs': 0.01765} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.152728] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc67d470-5a22-4adf-84f1-25298eaa66e3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.157290] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6c9de4c-3f88-49e7-8147-a9f078d77bcc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.165041] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for the task: (returnval){ [ 674.165041] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52988e33-c6df-04a7-0f06-ccf6a93937b4" [ 674.165041] env[63538]: _type = "Task" [ 674.165041] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.175505] env[63538]: DEBUG nova.compute.provider_tree [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.186047] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52988e33-c6df-04a7-0f06-ccf6a93937b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.295537] env[63538]: DEBUG nova.compute.manager [req-81a6a3f9-3672-400c-8b62-c55916df53d0 req-8d92a561-4578-4eed-94b4-65affbf17a2f service nova] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Received event network-changed-e7647c9c-cea3-4b43-ba7e-69aaac6286a8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 674.295739] env[63538]: DEBUG nova.compute.manager [req-81a6a3f9-3672-400c-8b62-c55916df53d0 req-8d92a561-4578-4eed-94b4-65affbf17a2f service nova] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Refreshing instance network info cache due to event network-changed-e7647c9c-cea3-4b43-ba7e-69aaac6286a8. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 674.295922] env[63538]: DEBUG oslo_concurrency.lockutils [req-81a6a3f9-3672-400c-8b62-c55916df53d0 req-8d92a561-4578-4eed-94b4-65affbf17a2f service nova] Acquiring lock "refresh_cache-a7bb1869-5553-40d8-9c0b-366ccdef5fae" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.341509] env[63538]: DEBUG nova.compute.manager [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 674.355511] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Releasing lock "refresh_cache-a7bb1869-5553-40d8-9c0b-366ccdef5fae" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.355580] env[63538]: DEBUG nova.compute.manager [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Instance network_info: |[{"id": "e7647c9c-cea3-4b43-ba7e-69aaac6286a8", "address": "fa:16:3e:9a:ba:2f", "network": {"id": "c4ac48b0-a0c2-4910-bcb9-3f42d415030f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2107782775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdfc96ac41be43f9ba0596444eb75737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7647c9c-ce", "ovs_interfaceid": "e7647c9c-cea3-4b43-ba7e-69aaac6286a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 674.356308] env[63538]: DEBUG oslo_concurrency.lockutils [req-81a6a3f9-3672-400c-8b62-c55916df53d0 req-8d92a561-4578-4eed-94b4-65affbf17a2f service nova] Acquired lock "refresh_cache-a7bb1869-5553-40d8-9c0b-366ccdef5fae" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.356308] env[63538]: DEBUG nova.network.neutron [req-81a6a3f9-3672-400c-8b62-c55916df53d0 req-8d92a561-4578-4eed-94b4-65affbf17a2f service nova] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Refreshing network info cache for port e7647c9c-cea3-4b43-ba7e-69aaac6286a8 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 674.357714] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:ba:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c894ab55-c869-4530-9702-cb46d173ce94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e7647c9c-cea3-4b43-ba7e-69aaac6286a8', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 674.367313] env[63538]: DEBUG oslo.service.loopingcall [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 
tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 674.368192] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 674.368912] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-26c75668-4a8a-4793-8931-ffe6b18a7e65 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.389869] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 674.389869] env[63538]: value = "task-5100604" [ 674.389869] env[63538]: _type = "Task" [ 674.389869] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.399495] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100604, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.644845] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100603, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.680362] env[63538]: DEBUG nova.scheduler.client.report [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 674.687789] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52988e33-c6df-04a7-0f06-ccf6a93937b4, 'name': SearchDatastore_Task, 'duration_secs': 0.07775} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.688311] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.688553] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 43729260-d138-4e62-9cc5-4db3ca39f5d2/43729260-d138-4e62-9cc5-4db3ca39f5d2.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 674.688813] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a12125f6-c842-45b8-93a8-3fe4dddd4251 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.699314] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for the task: (returnval){ [ 674.699314] env[63538]: value = "task-5100605" [ 674.699314] env[63538]: _type = "Task" [ 674.699314] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.709755] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100605, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.878830] env[63538]: DEBUG oslo_concurrency.lockutils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.902420] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100604, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.142091] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100603, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.189617] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.753s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.190144] env[63538]: DEBUG nova.compute.manager [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 675.193021] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 20.197s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.193196] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.193355] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63538) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 675.193703] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.880s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.194034] env[63538]: DEBUG nova.objects.instance [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lazy-loading 'resources' on Instance uuid 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 675.198085] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d703dbe-3f9c-4813-957d-75b8b3e70d0b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.216418] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100605, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.217497] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be054840-4660-42af-8a11-5ffb16a257c1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.234800] env[63538]: DEBUG nova.network.neutron [req-81a6a3f9-3672-400c-8b62-c55916df53d0 req-8d92a561-4578-4eed-94b4-65affbf17a2f service nova] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Updated VIF entry in instance network info cache for port e7647c9c-cea3-4b43-ba7e-69aaac6286a8. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 675.235255] env[63538]: DEBUG nova.network.neutron [req-81a6a3f9-3672-400c-8b62-c55916df53d0 req-8d92a561-4578-4eed-94b4-65affbf17a2f service nova] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Updating instance_info_cache with network_info: [{"id": "e7647c9c-cea3-4b43-ba7e-69aaac6286a8", "address": "fa:16:3e:9a:ba:2f", "network": {"id": "c4ac48b0-a0c2-4910-bcb9-3f42d415030f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2107782775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdfc96ac41be43f9ba0596444eb75737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7647c9c-ce", "ovs_interfaceid": "e7647c9c-cea3-4b43-ba7e-69aaac6286a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.236918] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce06881b-24fd-4d79-86fb-2ca91b214ee6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.247402] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e4e4c3-ea5f-42d2-8665-dda6f43f6a05 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.290554] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178545MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=63538) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 675.290735] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.402471] 
env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100604, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.642101] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100603, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.770086} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.642661] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 65fc18ff-8901-40d2-8a5b-640eb9768240/65fc18ff-8901-40d2-8a5b-640eb9768240.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 675.643043] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 675.643158] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e97e0cd0-ad9b-4a54-be24-2d9eabb626eb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.652031] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 675.652031] env[63538]: value = "task-5100606" [ 675.652031] env[63538]: _type = "Task" [ 675.652031] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.664045] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100606, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.699104] env[63538]: DEBUG nova.compute.utils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 675.704448] env[63538]: DEBUG nova.compute.manager [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 675.704741] env[63538]: DEBUG nova.network.neutron [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 675.718711] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100605, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.741749] env[63538]: DEBUG oslo_concurrency.lockutils [req-81a6a3f9-3672-400c-8b62-c55916df53d0 req-8d92a561-4578-4eed-94b4-65affbf17a2f service nova] Releasing lock "refresh_cache-a7bb1869-5553-40d8-9c0b-366ccdef5fae" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.782810] env[63538]: DEBUG nova.policy [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7c24e452e524341b955151538a66f59', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4ac96b6c57f41b9b897adcb8ed56904', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 675.907302] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100604, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.176527] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100606, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100271} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.176527] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 676.177787] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e2887d-8c57-4616-a229-d998cd063c07 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.187379] env[63538]: DEBUG nova.network.neutron [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Successfully created port: 1f5998f7-6ddc-4ca5-9082-f2c44c59a95e {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 676.206895] env[63538]: DEBUG nova.compute.manager [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 676.223986] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 65fc18ff-8901-40d2-8a5b-640eb9768240/65fc18ff-8901-40d2-8a5b-640eb9768240.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 676.228147] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-224b9bca-2c19-45a6-80fc-ae83dedfe633 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.256954] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100605, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.369273} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.258667] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 43729260-d138-4e62-9cc5-4db3ca39f5d2/43729260-d138-4e62-9cc5-4db3ca39f5d2.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 676.258884] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 676.259223] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 676.259223] env[63538]: value = "task-5100607" [ 676.259223] env[63538]: _type = "Task" [ 676.259223] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.259490] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9678aefb-55cf-4b7e-946b-4c6bb56035b6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.274621] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100607, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.276055] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for the task: (returnval){ [ 676.276055] env[63538]: value = "task-5100608" [ 676.276055] env[63538]: _type = "Task" [ 676.276055] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.317635] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "a2e036ae-318b-44ea-9db0-10fa3838728b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.317966] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "a2e036ae-318b-44ea-9db0-10fa3838728b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.392875] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cf1903-44b7-4847-86cb-0605f1cc5a1d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.408656] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b422ee-82d4-497a-991b-11713df84761 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.412633] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100604, 'name': CreateVM_Task, 'duration_secs': 1.861129} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.412811] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 676.413985] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.414157] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.414474] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 676.414707] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7948f944-2ec7-43b0-92b0-efa81a9b067c {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.444972] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128276c2-6fd0-418c-9c2f-231895cecb99 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.449397] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 676.449397] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d61b95-2e7d-9946-30ce-43557aced691" [ 676.449397] env[63538]: _type = "Task" [ 676.449397] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.459361] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3e1af9-3a47-4abb-b55b-7f8d4d1ca050 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.466793] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d61b95-2e7d-9946-30ce-43557aced691, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.477662] env[63538]: DEBUG nova.compute.provider_tree [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.773415] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100607, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.787306] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100608, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.288861} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.787682] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 676.788748] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d73910-4007-4a7e-90c9-e42b56423a37 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.812621] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 43729260-d138-4e62-9cc5-4db3ca39f5d2/43729260-d138-4e62-9cc5-4db3ca39f5d2.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 676.813873] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ed6e556-912f-4ac4-b065-79e77cf0b023 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.836333] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for the task: (returnval){ [ 676.836333] env[63538]: value = "task-5100609" [ 676.836333] env[63538]: _type = "Task" [ 676.836333] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.845689] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100609, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.961402] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d61b95-2e7d-9946-30ce-43557aced691, 'name': SearchDatastore_Task, 'duration_secs': 0.025896} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.961699] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.961928] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 676.962194] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.962341] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.962519] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 676.962808] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e474fa6a-a392-44ed-8091-9c0b96e9706e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.971779] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 676.971959] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 676.972699] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c8912da-bb3e-4319-8674-d38dd31c3e86 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.978642] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 676.978642] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fa025b-f242-ff0c-80bb-65e9e30897c1" [ 676.978642] env[63538]: _type = "Task" [ 676.978642] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.982857] env[63538]: DEBUG nova.scheduler.client.report [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 676.991101] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fa025b-f242-ff0c-80bb-65e9e30897c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.231095] env[63538]: DEBUG nova.compute.manager [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 677.266038] env[63538]: DEBUG nova.virt.hardware [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 677.266570] env[63538]: DEBUG nova.virt.hardware [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 677.266894] env[63538]: DEBUG nova.virt.hardware [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 677.267246] env[63538]: DEBUG nova.virt.hardware [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 677.267513] env[63538]: DEBUG nova.virt.hardware [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 677.267775] env[63538]: DEBUG nova.virt.hardware [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 677.268144] env[63538]: DEBUG nova.virt.hardware [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 677.269972] env[63538]: DEBUG nova.virt.hardware [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 677.269972] env[63538]: DEBUG nova.virt.hardware [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 677.269972] env[63538]: DEBUG nova.virt.hardware [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 677.269972] env[63538]: DEBUG nova.virt.hardware [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 677.272959] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1950d8-bcf7-48de-803e-48e9090b0bb1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.289404] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100607, 'name': ReconfigVM_Task, 'duration_secs': 0.871231} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.295053] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47712901-a47e-404c-870e-329ab9547946 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.297299] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 65fc18ff-8901-40d2-8a5b-640eb9768240/65fc18ff-8901-40d2-8a5b-640eb9768240.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 677.298247] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5e190090-25ec-47af-8639-ec536364e667 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.314721] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 677.314721] env[63538]: value = "task-5100610" [ 677.314721] env[63538]: _type = "Task" [ 677.314721] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.349793] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100609, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.491584] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.298s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.493844] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fa025b-f242-ff0c-80bb-65e9e30897c1, 'name': SearchDatastore_Task, 'duration_secs': 0.013054} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.495293] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.018s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.495972] env[63538]: INFO nova.compute.claims [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 677.500132] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f3300b2-ab37-4f39-8a17-849c23040ec8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.506423] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 677.506423] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ee3b96-7b7d-b0a9-3fe2-d61c8ed84f94" [ 677.506423] env[63538]: _type = "Task" [ 677.506423] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.516929] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ee3b96-7b7d-b0a9-3fe2-d61c8ed84f94, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.519065] env[63538]: INFO nova.scheduler.client.report [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Deleted allocations for instance 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a [ 677.830888] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100610, 'name': Rename_Task, 'duration_secs': 0.252297} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.831272] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 677.831875] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d86d499-eda5-495c-95dc-35dc844c0a36 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.839761] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 677.839761] env[63538]: value = "task-5100611" [ 677.839761] env[63538]: _type = "Task" [ 677.839761] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.852230] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100611, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.856065] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100609, 'name': ReconfigVM_Task, 'duration_secs': 0.606033} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.856395] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 43729260-d138-4e62-9cc5-4db3ca39f5d2/43729260-d138-4e62-9cc5-4db3ca39f5d2.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 677.857107] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93ca1ad6-b683-495a-aa35-27fc4fe6d10c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.864656] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for the task: (returnval){ [ 677.864656] env[63538]: value = "task-5100612" [ 677.864656] env[63538]: _type = "Task" [ 677.864656] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.875112] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100612, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.019045] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ee3b96-7b7d-b0a9-3fe2-d61c8ed84f94, 'name': SearchDatastore_Task, 'duration_secs': 0.01346} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.019984] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.020271] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] a7bb1869-5553-40d8-9c0b-366ccdef5fae/a7bb1869-5553-40d8-9c0b-366ccdef5fae.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 678.021848] env[63538]: DEBUG nova.compute.manager [req-03a3d0dc-e465-46a6-8080-a2f121e69ff9 req-a37cfc0c-0092-4924-a8f6-fc9a96a02813 service nova] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Received event network-vif-plugged-1f5998f7-6ddc-4ca5-9082-f2c44c59a95e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 678.022046] env[63538]: DEBUG oslo_concurrency.lockutils [req-03a3d0dc-e465-46a6-8080-a2f121e69ff9 req-a37cfc0c-0092-4924-a8f6-fc9a96a02813 service nova] Acquiring lock "4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.022226] env[63538]: DEBUG oslo_concurrency.lockutils [req-03a3d0dc-e465-46a6-8080-a2f121e69ff9 req-a37cfc0c-0092-4924-a8f6-fc9a96a02813 service nova] Lock "4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.022390] env[63538]: DEBUG oslo_concurrency.lockutils [req-03a3d0dc-e465-46a6-8080-a2f121e69ff9 req-a37cfc0c-0092-4924-a8f6-fc9a96a02813 service nova] Lock "4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.022557] env[63538]: DEBUG nova.compute.manager [req-03a3d0dc-e465-46a6-8080-a2f121e69ff9 req-a37cfc0c-0092-4924-a8f6-fc9a96a02813 service nova] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] No waiting events found dispatching network-vif-plugged-1f5998f7-6ddc-4ca5-9082-f2c44c59a95e {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 678.022721] env[63538]: WARNING nova.compute.manager [req-03a3d0dc-e465-46a6-8080-a2f121e69ff9 req-a37cfc0c-0092-4924-a8f6-fc9a96a02813 service nova] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Received unexpected event network-vif-plugged-1f5998f7-6ddc-4ca5-9082-f2c44c59a95e for instance with vm_state building and task_state spawning. 
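The repeated "Invoking <Manager>.<Something_Task>" / "Waiting for the task: (returnval){ ... }" / "progress is N%" blocks above are the standard oslo.vmware task-polling pattern. A minimal sketch of that pattern follows; the credentials and the VM reference are placeholders, the host and poll behaviour simply mirror the session created at the top of this log, and this is not Nova's own source:

    # Minimal sketch, not Nova source: the oslo.vmware call pattern behind the
    # SearchDatastore_Task / CopyVirtualDisk_Task / PowerOnVM_Task waits above.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc1.osci.c.eu-de-1.cloud.sap',   # vCenter host, as in the session setup earlier in this log
        'USERNAME', 'PASSWORD',           # placeholders, not real credentials
        api_retry_count=10,
        task_poll_interval=0.5)

    vm_ref = ...  # hypothetical managed-object reference for an instance's VM

    # invoke_api() issues the SOAP request (the "Invoking VirtualMachine.PowerOnVM_Task ..." lines);
    # wait_for_task() then polls the task, producing the "Waiting for the task ... progress is N%"
    # lines, and returns the task info once the task reports success.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)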
[ 678.023321] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e477a0f1-01e6-48f7-99b1-3c4625b99f3c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.030763] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca01867c-7aaf-4091-8ade-f7efc890262b tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "1e33b68e-8509-4ec4-8ec4-dc758aae9a5a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.275s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.036315] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 678.036315] env[63538]: value = "task-5100613" [ 678.036315] env[63538]: _type = "Task" [ 678.036315] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.050061] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100613, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.125351] env[63538]: DEBUG nova.network.neutron [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Successfully updated port: 1f5998f7-6ddc-4ca5-9082-f2c44c59a95e {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 678.355410] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100611, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.375490] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100612, 'name': Rename_Task, 'duration_secs': 0.242494} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.375770] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 678.376030] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d006be32-5ade-4d25-bb8e-6ee5a05c94c5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.383817] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for the task: (returnval){ [ 678.383817] env[63538]: value = "task-5100614" [ 678.383817] env[63538]: _type = "Task" [ 678.383817] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.392485] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100614, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.552069] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100613, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.627504] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Acquiring lock "refresh_cache-4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.627655] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Acquired lock "refresh_cache-4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.627810] env[63538]: DEBUG nova.network.neutron [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 678.856945] env[63538]: DEBUG oslo_vmware.api [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100611, 'name': PowerOnVM_Task, 'duration_secs': 0.9536} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.860116] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 678.860365] env[63538]: INFO nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Took 13.10 seconds to spawn the instance on the hypervisor. [ 678.860624] env[63538]: DEBUG nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 678.862071] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206e805e-0256-4d76-97c7-99b4fdd23e8b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.899807] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100614, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.054178] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100613, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.785772} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.054488] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] a7bb1869-5553-40d8-9c0b-366ccdef5fae/a7bb1869-5553-40d8-9c0b-366ccdef5fae.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 679.054732] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 679.054965] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e7ecb6c-a8a8-4dce-b3e2-f6937db12f34 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.063217] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 679.063217] env[63538]: value = "task-5100615" [ 679.063217] env[63538]: _type = "Task" [ 679.063217] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.073014] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100615, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.122777] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081922f3-7017-4191-876a-39d56a48b035 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.130674] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8aa7d2f-7bc2-4d3f-912b-d35f402af481 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.164498] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fbd845-2d22-4149-9843-4433779d211c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.172881] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d4a88c-621a-4303-9022-a111a1c4b4df {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.177581] env[63538]: DEBUG nova.network.neutron [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 679.191149] env[63538]: DEBUG nova.compute.provider_tree [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.380064] env[63538]: INFO nova.compute.manager [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Took 46.56 seconds to build instance. [ 679.382608] env[63538]: DEBUG nova.network.neutron [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Updating instance_info_cache with network_info: [{"id": "1f5998f7-6ddc-4ca5-9082-f2c44c59a95e", "address": "fa:16:3e:37:22:9d", "network": {"id": "8d832d66-6d5f-4524-a53f-f2d46e94a747", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-881199195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4ac96b6c57f41b9b897adcb8ed56904", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f5998f7-6d", "ovs_interfaceid": "1f5998f7-6ddc-4ca5-9082-f2c44c59a95e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.394541] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100614, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.575060] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100615, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.274936} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.575060] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 679.575060] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98deef84-197e-407f-a131-a79867acc55a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.599030] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] a7bb1869-5553-40d8-9c0b-366ccdef5fae/a7bb1869-5553-40d8-9c0b-366ccdef5fae.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 679.599030] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d4a851e-e4cb-4b10-be95-f1bd73c1fcc4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.619559] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 679.619559] env[63538]: value = "task-5100616" [ 679.619559] env[63538]: _type = "Task" [ 679.619559] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.630413] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100616, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.694457] env[63538]: DEBUG nova.scheduler.client.report [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 679.788591] env[63538]: DEBUG oslo_concurrency.lockutils [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "ee9fe572-7a17-46db-8330-4b6f632c6b2c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.789572] env[63538]: DEBUG oslo_concurrency.lockutils [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "ee9fe572-7a17-46db-8330-4b6f632c6b2c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.789572] env[63538]: DEBUG oslo_concurrency.lockutils [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "ee9fe572-7a17-46db-8330-4b6f632c6b2c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.789572] env[63538]: DEBUG oslo_concurrency.lockutils [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "ee9fe572-7a17-46db-8330-4b6f632c6b2c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.789687] env[63538]: DEBUG oslo_concurrency.lockutils [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "ee9fe572-7a17-46db-8330-4b6f632c6b2c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.791909] env[63538]: INFO nova.compute.manager [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Terminating instance [ 679.794127] env[63538]: DEBUG nova.compute.manager [None 
req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 679.794326] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 679.795157] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c995ca7-531c-485b-bd53-a404c466e270 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.803953] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 679.804569] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d63bd6f-7d5e-45e4-9575-c606820bed7b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.812532] env[63538]: DEBUG oslo_vmware.api [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 679.812532] env[63538]: value = "task-5100617" [ 679.812532] env[63538]: _type = "Task" [ 679.812532] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.823783] env[63538]: DEBUG oslo_vmware.api [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100617, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.885252] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e2e7f1c6-bbfa-4b16-932e-66fc5f88f0df tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "65fc18ff-8901-40d2-8a5b-640eb9768240" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.040s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.885854] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Releasing lock "refresh_cache-4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.886242] env[63538]: DEBUG nova.compute.manager [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Instance network_info: |[{"id": "1f5998f7-6ddc-4ca5-9082-f2c44c59a95e", "address": "fa:16:3e:37:22:9d", "network": {"id": "8d832d66-6d5f-4524-a53f-f2d46e94a747", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-881199195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4ac96b6c57f41b9b897adcb8ed56904", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f5998f7-6d", "ovs_interfaceid": "1f5998f7-6ddc-4ca5-9082-f2c44c59a95e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 679.888151] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:22:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf63c3c8-d774-4b81-9b12-848612a96076', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f5998f7-6ddc-4ca5-9082-f2c44c59a95e', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 679.896451] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Creating folder: Project (f4ac96b6c57f41b9b897adcb8ed56904). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 679.897634] env[63538]: DEBUG nova.compute.manager [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 679.903400] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2f88606-ee8e-4dd4-8d64-8f1ec610d490 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.913879] env[63538]: DEBUG oslo_vmware.api [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100614, 'name': PowerOnVM_Task, 'duration_secs': 1.292224} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.914861] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 679.914861] env[63538]: INFO nova.compute.manager [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Took 11.16 seconds to spawn the instance on the hypervisor. [ 679.914861] env[63538]: DEBUG nova.compute.manager [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 679.915465] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41b672d-adfc-4d04-b6df-646ec976e406 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.920045] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Created folder: Project (f4ac96b6c57f41b9b897adcb8ed56904) in parent group-v992234. [ 679.920045] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Creating folder: Instances. Parent ref: group-v992318. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 679.920449] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1deced0-73b8-4bea-af73-612caa4d3ef6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.937830] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Created folder: Instances in parent group-v992318. [ 679.938098] env[63538]: DEBUG oslo.service.loopingcall [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 679.938324] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 679.939547] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66793bf6-3b5a-4254-b80e-4d62238b9640 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.960848] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 679.960848] env[63538]: value = "task-5100620" [ 679.960848] env[63538]: _type = "Task" [ 679.960848] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.970840] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100620, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.051831] env[63538]: DEBUG nova.compute.manager [req-33306bc8-d093-4622-9dc4-958186510b5c req-fc43905a-6229-4704-a69c-7aa3312d85e8 service nova] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Received event network-changed-1f5998f7-6ddc-4ca5-9082-f2c44c59a95e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 680.051831] env[63538]: DEBUG nova.compute.manager [req-33306bc8-d093-4622-9dc4-958186510b5c req-fc43905a-6229-4704-a69c-7aa3312d85e8 service nova] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Refreshing instance network info cache due to event network-changed-1f5998f7-6ddc-4ca5-9082-f2c44c59a95e. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 680.052092] env[63538]: DEBUG oslo_concurrency.lockutils [req-33306bc8-d093-4622-9dc4-958186510b5c req-fc43905a-6229-4704-a69c-7aa3312d85e8 service nova] Acquiring lock "refresh_cache-4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.052228] env[63538]: DEBUG oslo_concurrency.lockutils [req-33306bc8-d093-4622-9dc4-958186510b5c req-fc43905a-6229-4704-a69c-7aa3312d85e8 service nova] Acquired lock "refresh_cache-4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.052376] env[63538]: DEBUG nova.network.neutron [req-33306bc8-d093-4622-9dc4-958186510b5c req-fc43905a-6229-4704-a69c-7aa3312d85e8 service nova] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Refreshing network info cache for port 1f5998f7-6ddc-4ca5-9082-f2c44c59a95e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 680.132224] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100616, 'name': ReconfigVM_Task, 'duration_secs': 0.495121} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.132410] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Reconfigured VM instance instance-0000001f to attach disk [datastore1] a7bb1869-5553-40d8-9c0b-366ccdef5fae/a7bb1869-5553-40d8-9c0b-366ccdef5fae.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 680.133180] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d9b1365-02f1-43dd-87b2-906a1a05643d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.140755] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 680.140755] env[63538]: value = "task-5100621" [ 680.140755] env[63538]: _type = "Task" [ 680.140755] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.149486] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100621, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.200231] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.706s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.201025] env[63538]: DEBUG nova.compute.manager [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 680.204117] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.573s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.204464] env[63538]: DEBUG nova.objects.instance [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lazy-loading 'resources' on Instance uuid c065263a-fd40-4b44-a68e-0e03248d0bc0 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 680.323259] env[63538]: DEBUG oslo_vmware.api [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100617, 'name': PowerOffVM_Task, 'duration_secs': 0.236401} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.323591] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 680.323782] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 680.324089] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3edf672-eee8-47ea-9d99-0a84c2f77ed4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.401368] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 680.401598] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 680.401987] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Deleting the datastore file [datastore2] ee9fe572-7a17-46db-8330-4b6f632c6b2c {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 680.402136] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f95c835-ad73-4ec4-be7e-44bcad1c9f01 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.412233] env[63538]: DEBUG oslo_vmware.api [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 680.412233] env[63538]: value = "task-5100623" [ 680.412233] env[63538]: _type = "Task" [ 680.412233] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.423481] env[63538]: DEBUG oslo_vmware.api [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100623, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.425643] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.439678] env[63538]: INFO nova.compute.manager [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Took 41.56 seconds to build instance. [ 680.473201] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100620, 'name': CreateVM_Task, 'duration_secs': 0.389826} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.473388] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 680.474206] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.474405] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.474792] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 680.475036] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c816580-cf17-482b-a6bd-e6ff0b3062f4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.480446] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Waiting for the task: (returnval){ [ 680.480446] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c9b352-4962-4d5c-ae5e-9de088e4c362" [ 680.480446] env[63538]: _type = "Task" [ 680.480446] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.491072] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c9b352-4962-4d5c-ae5e-9de088e4c362, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.652728] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100621, 'name': Rename_Task, 'duration_secs': 0.202204} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.653056] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 680.653285] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72977506-b904-411d-ae6f-aa665a61dd44 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.660430] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 680.660430] env[63538]: value = "task-5100624" [ 680.660430] env[63538]: _type = "Task" [ 680.660430] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.669135] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100624, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.708131] env[63538]: DEBUG nova.compute.utils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 680.724108] env[63538]: DEBUG nova.compute.manager [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 680.725020] env[63538]: DEBUG nova.network.neutron [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 680.822319] env[63538]: DEBUG nova.policy [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78d5688727174c08a29ea7f3ac35e129', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdfc96ac41be43f9ba0596444eb75737', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 680.869691] env[63538]: DEBUG nova.network.neutron [req-33306bc8-d093-4622-9dc4-958186510b5c req-fc43905a-6229-4704-a69c-7aa3312d85e8 service nova] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Updated VIF entry in instance network info cache for port 1f5998f7-6ddc-4ca5-9082-f2c44c59a95e. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 680.870118] env[63538]: DEBUG nova.network.neutron [req-33306bc8-d093-4622-9dc4-958186510b5c req-fc43905a-6229-4704-a69c-7aa3312d85e8 service nova] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Updating instance_info_cache with network_info: [{"id": "1f5998f7-6ddc-4ca5-9082-f2c44c59a95e", "address": "fa:16:3e:37:22:9d", "network": {"id": "8d832d66-6d5f-4524-a53f-f2d46e94a747", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-881199195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4ac96b6c57f41b9b897adcb8ed56904", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f5998f7-6d", "ovs_interfaceid": "1f5998f7-6ddc-4ca5-9082-f2c44c59a95e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.924052] env[63538]: DEBUG oslo_vmware.api [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100623, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19996} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.926868] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 680.927044] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 680.927636] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 680.927636] env[63538]: INFO nova.compute.manager [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 680.927826] env[63538]: DEBUG oslo.service.loopingcall [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 680.928762] env[63538]: DEBUG nova.compute.manager [-] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 680.928888] env[63538]: DEBUG nova.network.neutron [-] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 680.941715] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3df51e88-de17-4d80-9eaa-84bf3a452670 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Lock "43729260-d138-4e62-9cc5-4db3ca39f5d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.186s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.996965] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c9b352-4962-4d5c-ae5e-9de088e4c362, 'name': SearchDatastore_Task, 'duration_secs': 0.010065} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.997866] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.998279] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 680.998651] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.998927] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.999354] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 681.000027] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ef74f00-0eb7-4e0c-88fd-e22a19122bcf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.017498] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 681.017850] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 681.023241] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba658639-eb82-459e-af5e-9de7954db051 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.034238] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Waiting for the task: (returnval){ [ 681.034238] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b69de2-ab19-60b9-8b9a-e635cb9cc12c" [ 681.034238] env[63538]: _type = "Task" [ 681.034238] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.045065] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b69de2-ab19-60b9-8b9a-e635cb9cc12c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.173038] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100624, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.224067] env[63538]: DEBUG nova.compute.manager [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 681.376372] env[63538]: DEBUG oslo_concurrency.lockutils [req-33306bc8-d093-4622-9dc4-958186510b5c req-fc43905a-6229-4704-a69c-7aa3312d85e8 service nova] Releasing lock "refresh_cache-4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.415571] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3444936c-4802-44af-a2ae-a76231b64925 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.424628] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d85a26-8144-4eb0-a3e2-13431aa30fa2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.465844] env[63538]: DEBUG nova.compute.manager [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 681.469699] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f37b0b7-58a6-4c58-99a0-1f3270198b68 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.478463] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b80f331-1a20-4c9e-913e-857ff0e70357 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.483490] env[63538]: DEBUG nova.network.neutron [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Successfully created port: ed12b7c5-6f91-4211-b601-6494ca052b0b {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 681.497197] env[63538]: DEBUG nova.compute.provider_tree [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.544732] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b69de2-ab19-60b9-8b9a-e635cb9cc12c, 'name': SearchDatastore_Task, 'duration_secs': 0.020706} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.545946] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96d6ac32-b523-4581-9713-67a700f13bfb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.553268] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Waiting for the task: (returnval){ [ 681.553268] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cc22ea-b9d6-a4b3-1601-bdf521ae13e0" [ 681.553268] env[63538]: _type = "Task" [ 681.553268] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.561060] env[63538]: DEBUG nova.compute.manager [req-3213ce12-95cf-417f-bd70-5923e8c2ee13 req-c9200e2f-8768-41d9-95c2-be506f1e5883 service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Received event network-changed-27c8d553-e481-41cf-9f67-20912d2adb46 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 681.561060] env[63538]: DEBUG nova.compute.manager [req-3213ce12-95cf-417f-bd70-5923e8c2ee13 req-c9200e2f-8768-41d9-95c2-be506f1e5883 service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Refreshing instance network info cache due to event network-changed-27c8d553-e481-41cf-9f67-20912d2adb46. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 681.561383] env[63538]: DEBUG oslo_concurrency.lockutils [req-3213ce12-95cf-417f-bd70-5923e8c2ee13 req-c9200e2f-8768-41d9-95c2-be506f1e5883 service nova] Acquiring lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.561653] env[63538]: DEBUG oslo_concurrency.lockutils [req-3213ce12-95cf-417f-bd70-5923e8c2ee13 req-c9200e2f-8768-41d9-95c2-be506f1e5883 service nova] Acquired lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.561905] env[63538]: DEBUG nova.network.neutron [req-3213ce12-95cf-417f-bd70-5923e8c2ee13 req-c9200e2f-8768-41d9-95c2-be506f1e5883 service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Refreshing network info cache for port 27c8d553-e481-41cf-9f67-20912d2adb46 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 681.572563] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cc22ea-b9d6-a4b3-1601-bdf521ae13e0, 'name': SearchDatastore_Task, 'duration_secs': 0.014111} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.573576] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.574095] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff/4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 681.574447] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dccac9cb-4d18-4759-b993-7379e0c452eb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.583686] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Waiting for the task: (returnval){ [ 681.583686] env[63538]: value = "task-5100625" [ 681.583686] env[63538]: _type = "Task" [ 681.583686] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.597147] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100625, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.674884] env[63538]: DEBUG oslo_vmware.api [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100624, 'name': PowerOnVM_Task, 'duration_secs': 0.640786} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.675203] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 681.675430] env[63538]: INFO nova.compute.manager [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Took 10.05 seconds to spawn the instance on the hypervisor. [ 681.676566] env[63538]: DEBUG nova.compute.manager [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 681.676566] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cacca8fc-b3b6-433d-9c92-e92cde802d4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.771991] env[63538]: DEBUG nova.network.neutron [-] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.996998] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.001471] env[63538]: DEBUG nova.scheduler.client.report [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 682.095823] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100625, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.128738] env[63538]: DEBUG nova.compute.manager [req-0da986ae-b6c7-4513-b71d-538d891939cb req-bc8b7bee-8a10-4703-b01a-18c04377c753 service nova] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Received event network-vif-deleted-eb08b565-8194-4325-b89d-38b0f6615179 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 682.196635] env[63538]: INFO nova.compute.manager [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Took 34.20 seconds to build instance. [ 682.240650] env[63538]: DEBUG nova.compute.manager [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 682.275786] env[63538]: INFO nova.compute.manager [-] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Took 1.35 seconds to deallocate network for instance. [ 682.292117] env[63538]: DEBUG nova.virt.hardware [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 682.292117] env[63538]: DEBUG nova.virt.hardware [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 682.292117] env[63538]: DEBUG nova.virt.hardware [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 682.292319] env[63538]: DEBUG nova.virt.hardware [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Flavor pref 
0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 682.292319] env[63538]: DEBUG nova.virt.hardware [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 682.292319] env[63538]: DEBUG nova.virt.hardware [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 682.295358] env[63538]: DEBUG nova.virt.hardware [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 682.295570] env[63538]: DEBUG nova.virt.hardware [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 682.295813] env[63538]: DEBUG nova.virt.hardware [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 682.296055] env[63538]: DEBUG nova.virt.hardware [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 682.296294] env[63538]: DEBUG nova.virt.hardware [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 682.297654] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0672680a-5b20-4e83-a24a-fd59df0e0945 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.312156] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a7739f-3003-4dde-a256-fce323fccec2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.466346] env[63538]: DEBUG nova.network.neutron [req-3213ce12-95cf-417f-bd70-5923e8c2ee13 req-c9200e2f-8768-41d9-95c2-be506f1e5883 service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Updated VIF entry in instance network info cache for port 27c8d553-e481-41cf-9f67-20912d2adb46. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 682.466727] env[63538]: DEBUG nova.network.neutron [req-3213ce12-95cf-417f-bd70-5923e8c2ee13 req-c9200e2f-8768-41d9-95c2-be506f1e5883 service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Updating instance_info_cache with network_info: [{"id": "27c8d553-e481-41cf-9f67-20912d2adb46", "address": "fa:16:3e:fb:de:70", "network": {"id": "3f23850b-a1b9-429c-b4c8-e41786ac3f89", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1688699578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94d1cf2838014527bb9c399ae0cff7ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c8d553-e4", "ovs_interfaceid": "27c8d553-e481-41cf-9f67-20912d2adb46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.513149] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.309s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.515926] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.819s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.515926] env[63538]: DEBUG nova.objects.instance [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Lazy-loading 'resources' on Instance uuid 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 682.536200] env[63538]: INFO nova.scheduler.client.report [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Deleted allocations for instance c065263a-fd40-4b44-a68e-0e03248d0bc0 [ 682.597477] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100625, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624175} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.597764] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff/4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 682.597983] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 682.598579] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-858bf459-a2e1-49ed-9998-5c2733d1143d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.608529] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Waiting for the task: (returnval){ [ 682.608529] env[63538]: value = "task-5100626" [ 682.608529] env[63538]: _type = "Task" [ 682.608529] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.622765] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100626, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.699477] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ca224cde-965b-4142-80cf-3e23779207c9 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "a7bb1869-5553-40d8-9c0b-366ccdef5fae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.883s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.787091] env[63538]: DEBUG oslo_concurrency.lockutils [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.969474] env[63538]: DEBUG oslo_concurrency.lockutils [req-3213ce12-95cf-417f-bd70-5923e8c2ee13 req-c9200e2f-8768-41d9-95c2-be506f1e5883 service nova] Releasing lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.051038] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d7751ea-0a30-42a7-a2f1-44f28a059c10 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "c065263a-fd40-4b44-a68e-0e03248d0bc0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.893s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.125318] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100626, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076543} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.125507] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 683.126333] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a4bc00-080b-4a06-991a-c69f88a673e7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.149713] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff/4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 683.152891] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9daaa76-28fc-4daa-89ab-1c042b55902c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.174229] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Waiting for the task: (returnval){ [ 683.174229] env[63538]: value = "task-5100627" [ 683.174229] env[63538]: _type = "Task" [ 683.174229] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.184673] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100627, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.203452] env[63538]: DEBUG nova.compute.manager [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 683.247238] env[63538]: DEBUG nova.network.neutron [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Successfully updated port: ed12b7c5-6f91-4211-b601-6494ca052b0b {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 683.685110] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100627, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.687199] env[63538]: DEBUG nova.compute.manager [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Received event network-vif-plugged-ed12b7c5-6f91-4211-b601-6494ca052b0b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 683.687408] env[63538]: DEBUG oslo_concurrency.lockutils [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] Acquiring lock "99de5226-a27c-47c5-90fa-5f0c7204df1c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.687607] env[63538]: DEBUG oslo_concurrency.lockutils [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] Lock "99de5226-a27c-47c5-90fa-5f0c7204df1c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.687770] env[63538]: DEBUG oslo_concurrency.lockutils [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] Lock "99de5226-a27c-47c5-90fa-5f0c7204df1c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.687938] env[63538]: DEBUG nova.compute.manager [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] No waiting events found dispatching network-vif-plugged-ed12b7c5-6f91-4211-b601-6494ca052b0b {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 683.688117] env[63538]: WARNING nova.compute.manager [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Received unexpected event network-vif-plugged-ed12b7c5-6f91-4211-b601-6494ca052b0b for instance with vm_state building and task_state spawning. [ 683.688317] env[63538]: DEBUG nova.compute.manager [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Received event network-changed-ed12b7c5-6f91-4211-b601-6494ca052b0b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 683.688554] env[63538]: DEBUG nova.compute.manager [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Refreshing instance network info cache due to event network-changed-ed12b7c5-6f91-4211-b601-6494ca052b0b. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 683.688812] env[63538]: DEBUG oslo_concurrency.lockutils [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] Acquiring lock "refresh_cache-99de5226-a27c-47c5-90fa-5f0c7204df1c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.688981] env[63538]: DEBUG oslo_concurrency.lockutils [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] Acquired lock "refresh_cache-99de5226-a27c-47c5-90fa-5f0c7204df1c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.689160] env[63538]: DEBUG nova.network.neutron [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Refreshing network info cache for port ed12b7c5-6f91-4211-b601-6494ca052b0b {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 683.694299] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c7fd7d-dd9a-4784-b710-8042b769fb22 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.701789] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57bd2a61-18e4-454f-b6a9-17e516b4df18 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.743446] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6534e3e-e06a-46fa-bc7e-49350fb7ece3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.755445] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "refresh_cache-99de5226-a27c-47c5-90fa-5f0c7204df1c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.757899] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd5e648-9f5f-4616-83d0-a415b1e6ca0f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.763813] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.776649] env[63538]: DEBUG nova.compute.provider_tree [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.186481] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 
tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100627, 'name': ReconfigVM_Task, 'duration_secs': 0.916077} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.188048] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff/4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 684.188048] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b94067d3-aa4e-492d-a248-c02dbc646339 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.194690] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Waiting for the task: (returnval){ [ 684.194690] env[63538]: value = "task-5100628" [ 684.194690] env[63538]: _type = "Task" [ 684.194690] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.204989] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100628, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.230807] env[63538]: DEBUG nova.network.neutron [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 684.279922] env[63538]: DEBUG nova.scheduler.client.report [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 684.324556] env[63538]: DEBUG nova.network.neutron [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.706635] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100628, 'name': Rename_Task, 'duration_secs': 0.166479} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.706635] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 684.706635] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8b13775-15bf-47a3-88a3-b1e22f53611d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.712667] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Waiting for the task: (returnval){ [ 684.712667] env[63538]: value = "task-5100629" [ 684.712667] env[63538]: _type = "Task" [ 684.712667] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.720902] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100629, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.787927] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.272s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.790528] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.576s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.792026] env[63538]: INFO nova.compute.claims [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 684.817201] env[63538]: INFO nova.scheduler.client.report [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Deleted allocations for instance 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b [ 684.827428] env[63538]: DEBUG oslo_concurrency.lockutils [req-5d99efb4-02be-4ca4-8bd0-0dc0f9c23039 req-bfa330ca-f062-4520-b948-900542013769 service nova] Releasing lock "refresh_cache-99de5226-a27c-47c5-90fa-5f0c7204df1c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.827780] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquired lock "refresh_cache-99de5226-a27c-47c5-90fa-5f0c7204df1c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.827939] env[63538]: DEBUG nova.network.neutron [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 685.223374] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100629, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.324452] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d9ed1c99-b761-4753-bbcf-21d9f03bd1b4 tempest-ServerMetadataNegativeTestJSON-434414899 tempest-ServerMetadataNegativeTestJSON-434414899-project-member] Lock "7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.872s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.381419] env[63538]: DEBUG nova.network.neutron [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 685.727617] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100629, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.911203] env[63538]: DEBUG nova.network.neutron [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Updating instance_info_cache with network_info: [{"id": "ed12b7c5-6f91-4211-b601-6494ca052b0b", "address": "fa:16:3e:f2:4f:62", "network": {"id": "c4ac48b0-a0c2-4910-bcb9-3f42d415030f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2107782775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdfc96ac41be43f9ba0596444eb75737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped12b7c5-6f", "ovs_interfaceid": "ed12b7c5-6f91-4211-b601-6494ca052b0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.225144] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100629, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.382186] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4af555c-4401-41a9-ab48-ac090373c78f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.389701] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89dd028f-abfc-4de8-a85a-c574905cab67 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.419198] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Releasing lock "refresh_cache-99de5226-a27c-47c5-90fa-5f0c7204df1c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.419483] env[63538]: DEBUG nova.compute.manager [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Instance network_info: |[{"id": "ed12b7c5-6f91-4211-b601-6494ca052b0b", "address": "fa:16:3e:f2:4f:62", "network": {"id": "c4ac48b0-a0c2-4910-bcb9-3f42d415030f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2107782775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdfc96ac41be43f9ba0596444eb75737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped12b7c5-6f", "ovs_interfaceid": "ed12b7c5-6f91-4211-b601-6494ca052b0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 686.420066] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:4f:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c894ab55-c869-4530-9702-cb46d173ce94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ed12b7c5-6f91-4211-b601-6494ca052b0b', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 686.427373] env[63538]: DEBUG oslo.service.loopingcall [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 686.428038] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c14614-a2fc-4680-a7b8-8171c97418fe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.430579] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 686.430787] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f64388d8-58c5-4b34-a260-d63bd1cd112a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.451499] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8cc55e-829c-4ecb-8780-a857e7e67dc4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.455504] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 686.455504] env[63538]: value = "task-5100630" [ 686.455504] env[63538]: _type = "Task" [ 686.455504] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.467195] env[63538]: DEBUG nova.compute.provider_tree [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.473937] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100630, 'name': CreateVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.730166] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100629, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.966543] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100630, 'name': CreateVM_Task, 'duration_secs': 0.470522} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.966729] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 686.967462] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.967631] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.968027] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 686.968307] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7108bd0b-1e27-44ef-82ac-45163d48e815 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.970572] env[63538]: DEBUG nova.scheduler.client.report [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 686.977643] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 686.977643] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52423c05-6b49-6e0a-44df-232470588cd1" [ 686.977643] env[63538]: _type = "Task" [ 686.977643] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.987627] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52423c05-6b49-6e0a-44df-232470588cd1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.230117] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100629, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.476421] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.476992] env[63538]: DEBUG nova.compute.manager [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 687.479397] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.917s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.479616] env[63538]: DEBUG nova.objects.instance [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Lazy-loading 'resources' on Instance uuid 102c0463-fb64-4dda-914c-b98c8e9991ad {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 687.491586] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52423c05-6b49-6e0a-44df-232470588cd1, 'name': SearchDatastore_Task, 'duration_secs': 0.02145} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.491882] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.492125] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 687.492552] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.492635] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.492770] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 687.493073] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd5eed15-55f4-47f5-87fc-f557710f2735 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.505094] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 687.505181] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 687.505979] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba7d14b3-d701-4cbc-8094-c257333792d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.513303] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 687.513303] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e8fccc-11ba-a429-5576-86153c39bc82" [ 687.513303] env[63538]: _type = "Task" [ 687.513303] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.526937] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e8fccc-11ba-a429-5576-86153c39bc82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.729821] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100629, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.983656] env[63538]: DEBUG nova.compute.utils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 687.988484] env[63538]: DEBUG nova.compute.manager [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 687.988666] env[63538]: DEBUG nova.network.neutron [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 688.028110] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e8fccc-11ba-a429-5576-86153c39bc82, 'name': SearchDatastore_Task, 'duration_secs': 0.014208} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.032296] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc6fe6a1-3cc8-4997-9507-75423d63593b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.038583] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 688.038583] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524aef44-22c9-31c3-ca6c-1ed9292c4bcf" [ 688.038583] env[63538]: _type = "Task" [ 688.038583] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.049560] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524aef44-22c9-31c3-ca6c-1ed9292c4bcf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.082618] env[63538]: DEBUG nova.policy [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '884364daa8f746fb9b32a8b33c9e2cfd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea05f3fb4676466bb2a286f5a2fefb8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 688.229968] env[63538]: DEBUG oslo_vmware.api [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100629, 'name': PowerOnVM_Task, 'duration_secs': 3.016697} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.230340] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 688.230522] env[63538]: INFO nova.compute.manager [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Took 11.00 seconds to spawn the instance on the hypervisor. 
[ 688.230699] env[63538]: DEBUG nova.compute.manager [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 688.232898] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48cbec3e-8e5a-4fed-b8eb-51fe63445dfb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.485431] env[63538]: DEBUG nova.network.neutron [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Successfully created port: 416e5ec4-712e-489c-a33b-2fa922a4bfc9 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 688.489222] env[63538]: DEBUG nova.compute.manager [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 688.558159] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524aef44-22c9-31c3-ca6c-1ed9292c4bcf, 'name': SearchDatastore_Task, 'duration_secs': 0.013842} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.558159] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.558159] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 99de5226-a27c-47c5-90fa-5f0c7204df1c/99de5226-a27c-47c5-90fa-5f0c7204df1c.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 688.559416] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51417707-a5cf-4eed-a6cf-1c9c6c9e3edf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.567765] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 688.567765] env[63538]: value = "task-5100631" [ 688.567765] env[63538]: _type = "Task" [ 688.567765] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.582288] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100631, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.758277] env[63538]: INFO nova.compute.manager [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Took 37.60 seconds to build instance. [ 688.771793] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd03ec5d-c3d4-4e91-9347-09c0e80a2627 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.781539] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af928ba-d934-49a3-a595-bab78f5e520e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.819701] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f1c80f-28bc-462e-b8cb-ccc71935b1d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.830205] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0641652-3cca-4b90-9174-ea57d58876c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.847871] env[63538]: DEBUG nova.compute.provider_tree [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.090872] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100631, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.260657] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee069e38-793b-47d9-bc1a-ef3f036402c4 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Lock "4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.123s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.351451] env[63538]: DEBUG nova.scheduler.client.report [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 689.501167] env[63538]: DEBUG nova.compute.manager [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 689.541161] env[63538]: DEBUG nova.virt.hardware [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 689.541161] env[63538]: DEBUG nova.virt.hardware [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 689.541161] env[63538]: DEBUG nova.virt.hardware [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 689.541161] env[63538]: DEBUG nova.virt.hardware [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 
tempest-ServersTestJSON-148199479-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 689.541551] env[63538]: DEBUG nova.virt.hardware [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 689.541779] env[63538]: DEBUG nova.virt.hardware [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 689.542150] env[63538]: DEBUG nova.virt.hardware [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 689.543229] env[63538]: DEBUG nova.virt.hardware [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 689.546020] env[63538]: DEBUG nova.virt.hardware [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 689.546020] env[63538]: DEBUG nova.virt.hardware [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 689.546020] env[63538]: DEBUG nova.virt.hardware [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 689.546020] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8da63dd-66a6-47f9-beed-a4ff3ceb64c7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.554834] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8073cb56-1689-46b0-8a4c-06d0ee716170 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.589261] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100631, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.737658} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.589756] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 99de5226-a27c-47c5-90fa-5f0c7204df1c/99de5226-a27c-47c5-90fa-5f0c7204df1c.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 689.590058] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 689.590935] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6fb75eaa-8b3e-4142-8dd4-93995ad0944a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.599993] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 689.599993] env[63538]: value = "task-5100632" [ 689.599993] env[63538]: _type = "Task" [ 689.599993] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.613722] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100632, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.765920] env[63538]: DEBUG nova.compute.manager [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 689.859030] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.377s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.860303] env[63538]: DEBUG oslo_concurrency.lockutils [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.961s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.864725] env[63538]: DEBUG nova.objects.instance [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lazy-loading 'resources' on Instance uuid a6bb8713-6b00-4a43-96b7-a84ee39d790d {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 689.891774] env[63538]: INFO nova.scheduler.client.report [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Deleted allocations for instance 102c0463-fb64-4dda-914c-b98c8e9991ad [ 690.113207] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100632, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077452} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.113517] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 690.114507] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed9ce60-6975-4d54-9437-fc9fb7a44793 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.144160] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 99de5226-a27c-47c5-90fa-5f0c7204df1c/99de5226-a27c-47c5-90fa-5f0c7204df1c.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 690.144459] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c058b77-c526-41c7-92f8-bfadcabd6de1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.167981] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 690.167981] env[63538]: value = "task-5100633" [ 690.167981] env[63538]: _type = "Task" [ 690.167981] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.177369] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100633, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.293416] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.409028] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a7c4834c-52af-4042-904b-70ab420d6b2d tempest-ServersV294TestFqdnHostnames-1833994765 tempest-ServersV294TestFqdnHostnames-1833994765-project-member] Lock "102c0463-fb64-4dda-914c-b98c8e9991ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.644s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.688751] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100633, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.719044] env[63538]: DEBUG nova.compute.manager [req-8042fd34-45f3-4f5a-84e7-5dcdace6fe61 req-3488249c-94fc-4f9e-82e1-028485bc05c6 service nova] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Received event network-vif-plugged-416e5ec4-712e-489c-a33b-2fa922a4bfc9 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 690.719271] env[63538]: DEBUG oslo_concurrency.lockutils [req-8042fd34-45f3-4f5a-84e7-5dcdace6fe61 req-3488249c-94fc-4f9e-82e1-028485bc05c6 service nova] Acquiring lock "61068d41-5f5d-4ee5-b546-71da13eff93d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.720999] env[63538]: DEBUG oslo_concurrency.lockutils [req-8042fd34-45f3-4f5a-84e7-5dcdace6fe61 req-3488249c-94fc-4f9e-82e1-028485bc05c6 service nova] Lock "61068d41-5f5d-4ee5-b546-71da13eff93d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.720999] env[63538]: DEBUG oslo_concurrency.lockutils [req-8042fd34-45f3-4f5a-84e7-5dcdace6fe61 req-3488249c-94fc-4f9e-82e1-028485bc05c6 service nova] Lock "61068d41-5f5d-4ee5-b546-71da13eff93d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.720999] env[63538]: DEBUG nova.compute.manager [req-8042fd34-45f3-4f5a-84e7-5dcdace6fe61 req-3488249c-94fc-4f9e-82e1-028485bc05c6 service nova] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] No waiting events found dispatching network-vif-plugged-416e5ec4-712e-489c-a33b-2fa922a4bfc9 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 690.720999] env[63538]: WARNING nova.compute.manager [req-8042fd34-45f3-4f5a-84e7-5dcdace6fe61 req-3488249c-94fc-4f9e-82e1-028485bc05c6 service nova] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Received unexpected event network-vif-plugged-416e5ec4-712e-489c-a33b-2fa922a4bfc9 for instance with vm_state building and task_state spawning. 
[ 690.762029] env[63538]: DEBUG nova.network.neutron [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Successfully updated port: 416e5ec4-712e-489c-a33b-2fa922a4bfc9 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 690.975978] env[63538]: DEBUG oslo_concurrency.lockutils [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Acquiring lock "4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.975978] env[63538]: DEBUG oslo_concurrency.lockutils [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Lock "4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.975978] env[63538]: DEBUG oslo_concurrency.lockutils [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Acquiring lock "4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.975978] env[63538]: DEBUG oslo_concurrency.lockutils [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Lock "4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.976245] env[63538]: DEBUG oslo_concurrency.lockutils [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Lock "4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.981558] env[63538]: INFO nova.compute.manager [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Terminating instance [ 690.984141] env[63538]: DEBUG nova.compute.manager [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 690.984498] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 690.985816] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93334dc-2e1b-4de6-9948-63fd964edaf2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.997572] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 690.998530] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aed78ad5-2f87-48b1-afc9-63f43882281f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.009587] env[63538]: DEBUG oslo_vmware.api [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Waiting for the task: (returnval){ [ 691.009587] env[63538]: value = "task-5100634" [ 691.009587] env[63538]: _type = "Task" [ 691.009587] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.020534] env[63538]: DEBUG oslo_vmware.api [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100634, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.100764] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e4ec32-e54e-40d7-ad27-02014d74b510 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.109283] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da22e11-c1aa-4aee-bad3-e2a3d26ca7ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.144370] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6209615-71fe-4cc6-b8ca-d1b5b40fb344 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.152945] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f02437-2a28-489b-93ed-f7c3d7f3928f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.177556] env[63538]: DEBUG nova.compute.provider_tree [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 691.192084] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100633, 'name': ReconfigVM_Task, 'duration_secs': 0.620647} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.193246] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 99de5226-a27c-47c5-90fa-5f0c7204df1c/99de5226-a27c-47c5-90fa-5f0c7204df1c.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 691.193935] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2e7d907-114d-49d1-8e5c-14e7f04aab30 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.202325] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 691.202325] env[63538]: value = "task-5100635" [ 691.202325] env[63538]: _type = "Task" [ 691.202325] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.212658] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100635, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.269016] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "refresh_cache-61068d41-5f5d-4ee5-b546-71da13eff93d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.269016] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "refresh_cache-61068d41-5f5d-4ee5-b546-71da13eff93d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.269242] env[63538]: DEBUG nova.network.neutron [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 691.521705] env[63538]: DEBUG oslo_vmware.api [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100634, 'name': PowerOffVM_Task, 'duration_secs': 0.275696} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.522047] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 691.522215] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 691.522675] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36f00bf7-82a6-47ca-b1b2-99d015080ede {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.592299] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 691.592524] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 691.592713] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Deleting the datastore file [datastore1] 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 691.592986] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-304d0be1-4b16-41e5-9c1e-8b47390f3606 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.600876] env[63538]: DEBUG oslo_vmware.api [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Waiting for the task: (returnval){ [ 691.600876] env[63538]: value = "task-5100637" [ 691.600876] env[63538]: _type = "Task" [ 691.600876] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.609740] env[63538]: DEBUG oslo_vmware.api [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100637, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.715066] env[63538]: ERROR nova.scheduler.client.report [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] [req-e50a6d95-f468-4e4e-889b-eb2e6e362e75] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e50a6d95-f468-4e4e-889b-eb2e6e362e75"}]} [ 691.723656] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100635, 'name': Rename_Task, 'duration_secs': 0.201975} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.723890] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 691.724229] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-568f223a-66c8-4926-90ae-700b49bdf909 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.731574] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 691.731574] env[63538]: value = "task-5100638" [ 691.731574] env[63538]: _type = "Task" [ 691.731574] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.740619] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100638, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.744092] env[63538]: DEBUG nova.scheduler.client.report [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 691.780033] env[63538]: DEBUG nova.scheduler.client.report [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 691.780033] env[63538]: DEBUG nova.compute.provider_tree [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 691.796498] env[63538]: DEBUG nova.scheduler.client.report [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 691.811410] env[63538]: DEBUG nova.network.neutron [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 691.817946] env[63538]: DEBUG nova.scheduler.client.report [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 692.014381] env[63538]: DEBUG nova.network.neutron [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Updating instance_info_cache with network_info: [{"id": "416e5ec4-712e-489c-a33b-2fa922a4bfc9", "address": "fa:16:3e:a4:c5:b7", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap416e5ec4-71", "ovs_interfaceid": "416e5ec4-712e-489c-a33b-2fa922a4bfc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.115925] env[63538]: DEBUG oslo_vmware.api [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Task: {'id': task-5100637, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154267} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.119133] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 692.119133] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 692.119133] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 692.119133] env[63538]: INFO nova.compute.manager [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Took 1.13 seconds to destroy the instance on the hypervisor. [ 692.120570] env[63538]: DEBUG oslo.service.loopingcall [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 692.120570] env[63538]: DEBUG nova.compute.manager [-] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 692.120570] env[63538]: DEBUG nova.network.neutron [-] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 692.248230] env[63538]: DEBUG oslo_vmware.api [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100638, 'name': PowerOnVM_Task, 'duration_secs': 0.497161} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.248546] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 692.248724] env[63538]: INFO nova.compute.manager [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Took 10.01 seconds to spawn the instance on the hypervisor. 
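The records above all follow the same two-step pattern: nova.virt.vmwareapi asks vCenter to start a task (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, Rename_Task, PowerOnVM_Task) and oslo_vmware.api then polls that task until it completes, which is what the repeated wait_for_task / _poll_task lines are. A minimal sketch of that invoke-then-poll pattern using oslo.vmware directly; the host, credentials and managed-object ID below are placeholders, not values taken from this log:

    # Sketch: start a vCenter task and poll it to completion with oslo.vmware,
    # mirroring the PowerOffVM_Task -> wait_for_task -> _poll_task sequence above.
    # Host, credentials and the moref value are illustrative placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Managed object reference of the VM; Nova resolves this via a
    # property-collector lookup, here it is hard-coded for illustration.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Ask vCenter to start the power-off task ...
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # ... then block, polling every task_poll_interval seconds, until it finishes.
    session.wait_for_task(task)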
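The ERROR at [ 691.715066] is the Placement service rejecting an inventory update because the resource provider's generation changed under the report client (code placement.concurrent_update); the subsequent "Refreshing inventories / aggregate associations / trait associations" lines are the re-read that precedes the retry, which later concludes with "Inventory has not changed". A rough sketch of that generation-based optimistic-concurrency loop against the Placement HTTP API; this is a standalone requests client with a placeholder endpoint and token, not Nova's report-client code:

    # Sketch: update a resource provider's inventory, retrying when Placement
    # answers 409 placement.concurrent_update because the generation is stale.
    # Endpoint, token and microversion below are illustrative placeholders.
    import requests

    PLACEMENT = 'http://placement.example.org'
    HEADERS = {'X-Auth-Token': '<token>',
               'OpenStack-API-Version': 'placement 1.26'}

    def set_inventory(rp_uuid, inventories, attempts=4):
        for _ in range(attempts):
            # The GET returns the provider's current generation with its inventory.
            cur = requests.get(
                f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories',
                headers=HEADERS).json()
            body = {'resource_provider_generation':
                        cur['resource_provider_generation'],
                    'inventories': inventories}
            resp = requests.put(
                f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories',
                json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409: another writer (e.g. an allocation) bumped the generation;
            # loop to re-read the generation and try the PUT again.
        raise RuntimeError('inventory update kept hitting generation conflicts')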
[ 692.249465] env[63538]: DEBUG nova.compute.manager [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 692.249778] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a609b9c2-0def-426c-b92f-2fae19520c3f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.504328] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3be663-ee9d-4256-a384-edda79c86c1b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.520334] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "refresh_cache-61068d41-5f5d-4ee5-b546-71da13eff93d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.520334] env[63538]: DEBUG nova.compute.manager [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Instance network_info: |[{"id": "416e5ec4-712e-489c-a33b-2fa922a4bfc9", "address": "fa:16:3e:a4:c5:b7", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap416e5ec4-71", "ovs_interfaceid": "416e5ec4-712e-489c-a33b-2fa922a4bfc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 692.522873] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f192e0-596b-4968-80b3-56f43ffb3c7d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.527529] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:c5:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'416e5ec4-712e-489c-a33b-2fa922a4bfc9', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 692.535640] env[63538]: DEBUG oslo.service.loopingcall [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 692.535891] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 692.536565] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4f0bb75-5325-4dbc-9c24-cfe4d3a3dd94 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.597454] env[63538]: DEBUG nova.network.neutron [-] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.598097] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99b0b42-fdb3-4907-94c4-fb51f1463ecc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.603156] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 692.603156] env[63538]: value = "task-5100639" [ 692.603156] env[63538]: _type = "Task" [ 692.603156] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.612303] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c86c7e-2616-42d3-a701-8ecb5800d2c9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.624248] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100639, 'name': CreateVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.633541] env[63538]: DEBUG nova.compute.provider_tree [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.768681] env[63538]: INFO nova.compute.manager [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Took 36.31 seconds to build instance. [ 693.102377] env[63538]: INFO nova.compute.manager [-] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Took 0.98 seconds to deallocate network for instance. [ 693.127584] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100639, 'name': CreateVM_Task, 'duration_secs': 0.457656} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.130390] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 693.131626] env[63538]: DEBUG nova.compute.manager [req-3a7b97b9-0ade-485f-8fdb-de88a440bde5 req-ef684d02-b67c-4a7a-98fa-36726c857ac8 service nova] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Received event network-changed-416e5ec4-712e-489c-a33b-2fa922a4bfc9 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 693.131820] env[63538]: DEBUG nova.compute.manager [req-3a7b97b9-0ade-485f-8fdb-de88a440bde5 req-ef684d02-b67c-4a7a-98fa-36726c857ac8 service nova] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Refreshing instance network info cache due to event network-changed-416e5ec4-712e-489c-a33b-2fa922a4bfc9. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 693.132060] env[63538]: DEBUG oslo_concurrency.lockutils [req-3a7b97b9-0ade-485f-8fdb-de88a440bde5 req-ef684d02-b67c-4a7a-98fa-36726c857ac8 service nova] Acquiring lock "refresh_cache-61068d41-5f5d-4ee5-b546-71da13eff93d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.132205] env[63538]: DEBUG oslo_concurrency.lockutils [req-3a7b97b9-0ade-485f-8fdb-de88a440bde5 req-ef684d02-b67c-4a7a-98fa-36726c857ac8 service nova] Acquired lock "refresh_cache-61068d41-5f5d-4ee5-b546-71da13eff93d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.132414] env[63538]: DEBUG nova.network.neutron [req-3a7b97b9-0ade-485f-8fdb-de88a440bde5 req-ef684d02-b67c-4a7a-98fa-36726c857ac8 service nova] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Refreshing network info cache for port 416e5ec4-712e-489c-a33b-2fa922a4bfc9 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 693.133986] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.135502] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.135502] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 693.135502] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-702260ec-90cc-401c-961e-6d1390af6dd3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.138058] env[63538]: 
DEBUG nova.scheduler.client.report [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 693.145529] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 693.145529] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ab9388-246e-f3b7-67a8-d29b15fadb9f" [ 693.145529] env[63538]: _type = "Task" [ 693.145529] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.159058] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ab9388-246e-f3b7-67a8-d29b15fadb9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.277140] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2b11d01f-3d2a-49d5-92ef-3ebeb024cc87 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "99de5226-a27c-47c5-90fa-5f0c7204df1c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.573s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.620366] env[63538]: DEBUG oslo_concurrency.lockutils [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.646473] env[63538]: DEBUG oslo_concurrency.lockutils [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.786s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.649304] env[63538]: DEBUG oslo_concurrency.lockutils [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.888s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.649695] env[63538]: DEBUG nova.objects.instance [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 
tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Lazy-loading 'resources' on Instance uuid 543875b5-195a-476d-a0b4-3211ceefa27f {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 693.666121] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ab9388-246e-f3b7-67a8-d29b15fadb9f, 'name': SearchDatastore_Task, 'duration_secs': 0.01508} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.666613] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.666836] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 693.667075] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.667546] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.667546] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 693.667871] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09040a1e-1af4-464e-abbf-8c9126efc462 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.681036] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 693.681343] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 693.681957] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00be1280-857d-4f1b-b1fa-f0eb9b56e093 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.690204] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 693.690204] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5268d10c-43f7-1af3-b7ec-371214ddebac" [ 693.690204] env[63538]: _type = "Task" [ 693.690204] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.698990] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5268d10c-43f7-1af3-b7ec-371214ddebac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.703424] env[63538]: INFO nova.scheduler.client.report [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Deleted allocations for instance a6bb8713-6b00-4a43-96b7-a84ee39d790d [ 693.780817] env[63538]: DEBUG nova.compute.manager [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 694.067420] env[63538]: DEBUG nova.network.neutron [req-3a7b97b9-0ade-485f-8fdb-de88a440bde5 req-ef684d02-b67c-4a7a-98fa-36726c857ac8 service nova] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Updated VIF entry in instance network info cache for port 416e5ec4-712e-489c-a33b-2fa922a4bfc9. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 694.068714] env[63538]: DEBUG nova.network.neutron [req-3a7b97b9-0ade-485f-8fdb-de88a440bde5 req-ef684d02-b67c-4a7a-98fa-36726c857ac8 service nova] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Updating instance_info_cache with network_info: [{"id": "416e5ec4-712e-489c-a33b-2fa922a4bfc9", "address": "fa:16:3e:a4:c5:b7", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap416e5ec4-71", "ovs_interfaceid": "416e5ec4-712e-489c-a33b-2fa922a4bfc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.214676] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5268d10c-43f7-1af3-b7ec-371214ddebac, 'name': SearchDatastore_Task, 'duration_secs': 0.010432} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.214905] env[63538]: DEBUG oslo_concurrency.lockutils [None req-726c9c4f-4505-4514-b975-9d441ea3930f tempest-DeleteServersAdminTestJSON-1314143681 tempest-DeleteServersAdminTestJSON-1314143681-project-member] Lock "a6bb8713-6b00-4a43-96b7-a84ee39d790d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.373s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.218981] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fcf12d7-3756-4987-bec2-d72ed6975e37 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.225496] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 694.225496] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523c92d3-0a8f-69c4-66a2-1252393a6d75" [ 694.225496] env[63538]: _type = "Task" [ 694.225496] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.234870] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523c92d3-0a8f-69c4-66a2-1252393a6d75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.311286] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.573402] env[63538]: DEBUG oslo_concurrency.lockutils [req-3a7b97b9-0ade-485f-8fdb-de88a440bde5 req-ef684d02-b67c-4a7a-98fa-36726c857ac8 service nova] Releasing lock "refresh_cache-61068d41-5f5d-4ee5-b546-71da13eff93d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.573402] env[63538]: DEBUG nova.compute.manager [req-3a7b97b9-0ade-485f-8fdb-de88a440bde5 req-ef684d02-b67c-4a7a-98fa-36726c857ac8 service nova] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Received event network-vif-deleted-1f5998f7-6ddc-4ca5-9082-f2c44c59a95e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 694.739263] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523c92d3-0a8f-69c4-66a2-1252393a6d75, 'name': SearchDatastore_Task, 'duration_secs': 0.025108} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.739263] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.741832] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 61068d41-5f5d-4ee5-b546-71da13eff93d/61068d41-5f5d-4ee5-b546-71da13eff93d.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 694.741832] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40cbb6cc-fcc9-430d-ae92-82616cbb8e79 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.743793] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ff9106-b2ca-454f-ad74-17be2a98d1be {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.758578] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae8d976-f49d-4339-83aa-d4bd4b665604 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.763534] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 694.763534] env[63538]: value = "task-5100640" [ 694.763534] env[63538]: _type = "Task" [ 694.763534] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.803986] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e480507f-708d-4726-bab7-2e47333ab1f4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.809784] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100640, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.816742] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42eefb54-c87a-439b-9b59-fa2d7ef50954 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.834288] env[63538]: DEBUG nova.compute.provider_tree [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.979034] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "d5d557c6-3d4e-4122-8756-218c9757fa01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.979307] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.280259] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100640, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514647} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.280259] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 61068d41-5f5d-4ee5-b546-71da13eff93d/61068d41-5f5d-4ee5-b546-71da13eff93d.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 695.280259] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 695.280259] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffe68524-eeae-4ede-b7e6-eed3dba5a16e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.285999] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 695.285999] env[63538]: value = "task-5100641" [ 695.285999] env[63538]: _type = "Task" [ 695.285999] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.295491] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100641, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.337804] env[63538]: DEBUG nova.scheduler.client.report [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 695.797333] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100641, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.150166} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.798803] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 695.800754] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c715391-9961-4704-a587-5726e9cbc5d1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.826872] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 61068d41-5f5d-4ee5-b546-71da13eff93d/61068d41-5f5d-4ee5-b546-71da13eff93d.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 695.827346] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e1c6586-6d22-4053-95e3-bc23d625cdf3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.846790] env[63538]: DEBUG oslo_concurrency.lockutils [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.197s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.849849] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.728s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.850465] env[63538]: DEBUG nova.objects.instance [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lazy-loading 'resources' on Instance uuid e32789d5-59ba-4657-9a9c-84fc9bd6cfdf {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 695.860499] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 695.860499] env[63538]: value = "task-5100642" [ 695.860499] env[63538]: _type = "Task" [ 695.860499] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.869524] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100642, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.875389] env[63538]: INFO nova.scheduler.client.report [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Deleted allocations for instance 543875b5-195a-476d-a0b4-3211ceefa27f [ 696.373375] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100642, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.400245] env[63538]: DEBUG oslo_concurrency.lockutils [None req-02640fc3-750b-4b03-9b8d-cbdf457ed892 tempest-ServersTestManualDisk-1352659055 tempest-ServersTestManualDisk-1352659055-project-member] Lock "543875b5-195a-476d-a0b4-3211ceefa27f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.439s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.879415] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100642, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.990054] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914d3fb2-45c6-4410-a8a0-57eaae5d4337 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.999125] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d435078a-acf7-4b79-8701-7d878159a8d4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.032276] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a9f1f8-4e66-43f9-bf9f-316932ec5ec0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.042113] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b58237-a56d-4a51-b50e-2a2665e2747c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.058738] env[63538]: DEBUG nova.compute.provider_tree [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 697.375013] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] 
Task: {'id': task-5100642, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.588405] env[63538]: ERROR nova.scheduler.client.report [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [req-416c612a-73ad-4b9c-92a7-bd481cb4cc29] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-416c612a-73ad-4b9c-92a7-bd481cb4cc29"}]} [ 697.609830] env[63538]: DEBUG nova.scheduler.client.report [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 697.629265] env[63538]: DEBUG nova.scheduler.client.report [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 697.629265] env[63538]: DEBUG nova.compute.provider_tree [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 697.642684] env[63538]: DEBUG nova.scheduler.client.report [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 697.665976] env[63538]: DEBUG 
nova.scheduler.client.report [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 697.878997] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100642, 'name': ReconfigVM_Task, 'duration_secs': 1.9801} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.882267] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 61068d41-5f5d-4ee5-b546-71da13eff93d/61068d41-5f5d-4ee5-b546-71da13eff93d.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 697.886012] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-999f66d2-acfb-45a7-a559-15b01bbf5024 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.897180] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 697.897180] env[63538]: value = "task-5100643" [ 697.897180] env[63538]: _type = "Task" [ 697.897180] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.912826] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100643, 'name': Rename_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.116475] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9d39a7dc-7841-4eae-8602-d6db2340beee tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "b5593b74-fe89-43f5-a8c6-e73159b4efac" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.117720] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9d39a7dc-7841-4eae-8602-d6db2340beee tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "b5593b74-fe89-43f5-a8c6-e73159b4efac" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.117992] env[63538]: DEBUG nova.compute.manager [None req-9d39a7dc-7841-4eae-8602-d6db2340beee tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 698.118922] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5999617-9ae7-46bc-81e7-e69d7d183276 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.133750] env[63538]: DEBUG nova.compute.manager [None req-9d39a7dc-7841-4eae-8602-d6db2340beee tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63538) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 698.134239] env[63538]: DEBUG nova.objects.instance [None req-9d39a7dc-7841-4eae-8602-d6db2340beee tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lazy-loading 'flavor' on Instance uuid b5593b74-fe89-43f5-a8c6-e73159b4efac {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 698.342807] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4673c7c-f66b-4bef-94b8-17edd33e1b23 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.353087] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9623e8a-4b12-44be-935e-f1f43039b122 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.385952] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa876316-8f32-4599-a2dc-576da9e5eb87 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.394848] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b961ed-ddbc-450d-be82-000e9fe239ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.417576] env[63538]: 
DEBUG nova.compute.provider_tree [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 698.422053] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100643, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.643611] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d39a7dc-7841-4eae-8602-d6db2340beee tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 698.643611] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e338561-3623-48b0-b74d-d2973a58d17a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.651954] env[63538]: DEBUG oslo_vmware.api [None req-9d39a7dc-7841-4eae-8602-d6db2340beee tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 698.651954] env[63538]: value = "task-5100644" [ 698.651954] env[63538]: _type = "Task" [ 698.651954] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.661370] env[63538]: DEBUG oslo_vmware.api [None req-9d39a7dc-7841-4eae-8602-d6db2340beee tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100644, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.912393] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100643, 'name': Rename_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.970155] env[63538]: DEBUG nova.scheduler.client.report [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 62 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 698.970155] env[63538]: DEBUG nova.compute.provider_tree [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 62 to 63 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 698.970155] env[63538]: DEBUG nova.compute.provider_tree [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 699.165427] env[63538]: DEBUG oslo_vmware.api [None req-9d39a7dc-7841-4eae-8602-d6db2340beee tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100644, 'name': PowerOffVM_Task, 'duration_secs': 0.225518} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.165427] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d39a7dc-7841-4eae-8602-d6db2340beee tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 699.165542] env[63538]: DEBUG nova.compute.manager [None req-9d39a7dc-7841-4eae-8602-d6db2340beee tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 699.166615] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1a7b14-5305-408c-8e18-f01c6be69a9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.415404] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100643, 'name': Rename_Task, 'duration_secs': 1.331112} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.415745] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 699.415745] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb6a70b1-b501-4258-b33a-749c99ddb31f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.427841] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 699.427841] env[63538]: value = "task-5100645" [ 699.427841] env[63538]: _type = "Task" [ 699.427841] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.437843] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100645, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.475506] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.625s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.479304] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.381s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.483839] env[63538]: INFO nova.compute.claims [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 699.517638] env[63538]: INFO nova.scheduler.client.report [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Deleted allocations for instance e32789d5-59ba-4657-9a9c-84fc9bd6cfdf [ 699.681894] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9d39a7dc-7841-4eae-8602-d6db2340beee tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "b5593b74-fe89-43f5-a8c6-e73159b4efac" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.564s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.944361] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100645, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.029734] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95c1d4d-65a1-4087-bbc8-3a65c210a47c tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "e32789d5-59ba-4657-9a9c-84fc9bd6cfdf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.378s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.443110] env[63538]: DEBUG oslo_vmware.api [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100645, 'name': PowerOnVM_Task, 'duration_secs': 0.681403} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.444079] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 700.444885] env[63538]: INFO nova.compute.manager [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Took 10.94 seconds to spawn the instance on the hypervisor. [ 700.444885] env[63538]: DEBUG nova.compute.manager [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 700.445803] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6515a255-d2fb-4cda-abcf-ceda6ee4e09d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.776548] env[63538]: DEBUG nova.objects.instance [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lazy-loading 'flavor' on Instance uuid b5593b74-fe89-43f5-a8c6-e73159b4efac {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 700.976060] env[63538]: INFO nova.compute.manager [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Took 38.79 seconds to build instance. 
[ 701.130275] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45a5019-66f5-4f5b-a435-c87fcb2661da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.138990] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d1b3b3-24c4-4bb6-9032-f8e3cd81adc2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.174641] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58144958-957b-4d88-b500-c6bac01adb11 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.190046] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565afa39-03c6-46ca-9ab5-31f705c7a527 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.212941] env[63538]: DEBUG nova.compute.provider_tree [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.283527] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "refresh_cache-b5593b74-fe89-43f5-a8c6-e73159b4efac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.284821] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquired lock "refresh_cache-b5593b74-fe89-43f5-a8c6-e73159b4efac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.284821] env[63538]: DEBUG nova.network.neutron [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 701.284821] env[63538]: DEBUG nova.objects.instance [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lazy-loading 'info_cache' on Instance uuid b5593b74-fe89-43f5-a8c6-e73159b4efac {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 701.480354] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a923c9f7-9bfd-4ca9-a0d9-20df7540b253 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "61068d41-5f5d-4ee5-b546-71da13eff93d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.031s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.686281] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67b7113f-66b5-420c-b016-e30811283a97 
tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "36d40b69-fae7-4867-afa1-4befdc96bde0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.686563] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "36d40b69-fae7-4867-afa1-4befdc96bde0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.686769] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock "36d40b69-fae7-4867-afa1-4befdc96bde0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.686953] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "36d40b69-fae7-4867-afa1-4befdc96bde0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.687133] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "36d40b69-fae7-4867-afa1-4befdc96bde0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.690086] env[63538]: INFO nova.compute.manager [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Terminating instance [ 701.692246] env[63538]: DEBUG nova.compute.manager [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 701.692443] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 701.693357] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7845b09-7acc-4ae6-a9de-3feaed80b1a3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.701926] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 701.702218] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea58788c-2a40-4731-9c02-6ad991432163 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.709138] env[63538]: DEBUG oslo_vmware.api [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 701.709138] env[63538]: value = "task-5100646" [ 701.709138] env[63538]: _type = "Task" [ 701.709138] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.713589] env[63538]: DEBUG nova.scheduler.client.report [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 701.725499] env[63538]: DEBUG oslo_vmware.api [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100646, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.788357] env[63538]: DEBUG nova.objects.base [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 701.982826] env[63538]: DEBUG nova.compute.manager [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 702.221621] env[63538]: DEBUG oslo_vmware.api [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100646, 'name': PowerOffVM_Task, 'duration_secs': 0.225584} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.223064] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 702.223287] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 702.223558] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41f9a57d-a0e3-474c-92b1-8f8cc5cc6d40 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.226198] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.747s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.226681] env[63538]: DEBUG nova.compute.manager [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 702.229178] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.357s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.230484] env[63538]: INFO nova.compute.claims [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 702.289621] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 702.289986] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 702.290237] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Deleting the datastore file [datastore1] 36d40b69-fae7-4867-afa1-4befdc96bde0 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 702.293750] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56812a97-1c7f-4e7a-b37b-f78c6a6053cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.303906] env[63538]: DEBUG oslo_vmware.api [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for the task: (returnval){ [ 702.303906] env[63538]: value = "task-5100648" [ 702.303906] env[63538]: _type = "Task" [ 702.303906] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.318713] env[63538]: DEBUG oslo_vmware.api [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100648, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.509174] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.605223] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "61068d41-5f5d-4ee5-b546-71da13eff93d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.605223] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "61068d41-5f5d-4ee5-b546-71da13eff93d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.605223] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "61068d41-5f5d-4ee5-b546-71da13eff93d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.605384] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "61068d41-5f5d-4ee5-b546-71da13eff93d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.605545] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "61068d41-5f5d-4ee5-b546-71da13eff93d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.612520] env[63538]: INFO nova.compute.manager [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Terminating instance [ 702.616886] env[63538]: DEBUG nova.compute.manager [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 702.617098] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 702.618746] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7eb23ae-4e87-4b11-902c-ae046fa7cb61 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.628165] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 702.628444] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c42042f7-9e3b-43b4-aacb-8af45163b02a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.639030] env[63538]: DEBUG oslo_vmware.api [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 702.639030] env[63538]: value = "task-5100649" [ 702.639030] env[63538]: _type = "Task" [ 702.639030] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.650325] env[63538]: DEBUG oslo_vmware.api [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100649, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.734857] env[63538]: DEBUG nova.compute.utils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 702.738435] env[63538]: DEBUG nova.compute.manager [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 702.738568] env[63538]: DEBUG nova.network.neutron [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 702.819032] env[63538]: DEBUG oslo_vmware.api [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Task: {'id': task-5100648, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.335883} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.819388] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 702.819665] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 702.819909] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 702.820252] env[63538]: INFO nova.compute.manager [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Took 1.13 seconds to destroy the instance on the hypervisor. [ 702.820604] env[63538]: DEBUG oslo.service.loopingcall [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 702.820849] env[63538]: DEBUG nova.compute.manager [-] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 702.820957] env[63538]: DEBUG nova.network.neutron [-] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 702.857310] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Acquiring lock "f703cd1c-4b77-4a85-a91b-63a2bd0e84a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.857310] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Lock "f703cd1c-4b77-4a85-a91b-63a2bd0e84a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.949605] env[63538]: DEBUG nova.network.neutron [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Updating instance_info_cache with network_info: [{"id": "dc774ce7-f5aa-452a-828d-e56e0339fe56", "address": "fa:16:3e:3c:f8:6c", "network": {"id": "c4ac48b0-a0c2-4910-bcb9-3f42d415030f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2107782775-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdfc96ac41be43f9ba0596444eb75737", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc774ce7-f5", "ovs_interfaceid": "dc774ce7-f5aa-452a-828d-e56e0339fe56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.959493] env[63538]: DEBUG nova.policy [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21f98d3c77454d809c0aa1ca5c7dc6d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '492427e54e1048f292dab2abdac71af5', 'project_domain_id': 'default', 'roles': ['member', 
'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 703.152014] env[63538]: DEBUG oslo_vmware.api [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100649, 'name': PowerOffVM_Task, 'duration_secs': 0.240174} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.152293] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 703.152455] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 703.152935] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3e0f365-55ce-4d16-bec0-8b3a8bfdff9a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.229027] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 703.229027] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 703.229027] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleting the datastore file [datastore1] 61068d41-5f5d-4ee5-b546-71da13eff93d {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 703.229027] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e55f0658-ca77-4ea2-8462-9056715afa94 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.242021] env[63538]: DEBUG nova.compute.manager [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 703.248698] env[63538]: DEBUG oslo_vmware.api [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 703.248698] env[63538]: value = "task-5100651" [ 703.248698] env[63538]: _type = "Task" [ 703.248698] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.260524] env[63538]: DEBUG oslo_vmware.api [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100651, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.458050] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Releasing lock "refresh_cache-b5593b74-fe89-43f5-a8c6-e73159b4efac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.565328] env[63538]: DEBUG nova.network.neutron [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Successfully created port: 719964b3-d739-46b8-ae43-c589419299a1 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 703.599466] env[63538]: DEBUG nova.compute.manager [req-66dbe33d-4436-4c54-be34-4a18f5b66e2f req-f1829146-09c4-492b-9787-90c85adcf420 service nova] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Received event network-vif-deleted-ac528a58-339f-4621-890a-afe5e3ee634d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 703.599575] env[63538]: INFO nova.compute.manager [req-66dbe33d-4436-4c54-be34-4a18f5b66e2f req-f1829146-09c4-492b-9787-90c85adcf420 service nova] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Neutron deleted interface ac528a58-339f-4621-890a-afe5e3ee634d; detaching it from the instance and deleting it from the info cache [ 703.599713] env[63538]: DEBUG nova.network.neutron [req-66dbe33d-4436-4c54-be34-4a18f5b66e2f req-f1829146-09c4-492b-9787-90c85adcf420 service nova] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.766181] env[63538]: DEBUG oslo_vmware.api [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100651, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146171} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.769483] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 703.769693] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 703.769868] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 703.770084] env[63538]: INFO nova.compute.manager [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 703.770307] env[63538]: DEBUG oslo.service.loopingcall [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 703.770739] env[63538]: DEBUG nova.compute.manager [-] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 703.770840] env[63538]: DEBUG nova.network.neutron [-] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 703.883309] env[63538]: DEBUG nova.network.neutron [-] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.964031] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 703.964031] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2c01ef4-03ae-46c2-95ab-e8a167948eae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.973916] env[63538]: DEBUG oslo_vmware.api [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 703.973916] env[63538]: value = "task-5100652" [ 703.973916] env[63538]: _type = "Task" [ 703.973916] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.988918] env[63538]: DEBUG oslo_vmware.api [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100652, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.017328] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279bbbf0-42d6-4a1a-abe6-bd42fd235983 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.027438] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b6940c-0da0-4d03-a94d-685c655006ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.065020] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5d6cff-1327-4c9c-a450-0e7730ab15c6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.074467] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ba8785-35a6-4c0a-a97d-5ae7727b9e26 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.091090] env[63538]: DEBUG nova.compute.provider_tree [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 704.104050] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7322f65c-c07c-4dec-824a-918991d4c6df {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.116454] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d57416-c85d-49bf-bbc9-73581ef39987 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.167950] env[63538]: DEBUG nova.compute.manager [req-66dbe33d-4436-4c54-be34-4a18f5b66e2f req-f1829146-09c4-492b-9787-90c85adcf420 service nova] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Detach interface failed, port_id=ac528a58-339f-4621-890a-afe5e3ee634d, reason: Instance 36d40b69-fae7-4867-afa1-4befdc96bde0 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 704.259247] env[63538]: DEBUG nova.compute.manager [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 704.311687] env[63538]: DEBUG nova.virt.hardware [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 704.312019] env[63538]: DEBUG nova.virt.hardware [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 704.312356] env[63538]: DEBUG nova.virt.hardware [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.312646] env[63538]: DEBUG nova.virt.hardware [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 704.313495] env[63538]: DEBUG nova.virt.hardware [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.313495] env[63538]: DEBUG nova.virt.hardware [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 704.313495] env[63538]: DEBUG nova.virt.hardware [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 704.313690] env[63538]: DEBUG nova.virt.hardware [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 704.313768] env[63538]: DEBUG nova.virt.hardware [None 
req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 704.313926] env[63538]: DEBUG nova.virt.hardware [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 704.314156] env[63538]: DEBUG nova.virt.hardware [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 704.315393] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3f4263-7ddd-4540-bb51-8333ed6a6ed0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.328984] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215b5bb2-1803-4bce-a897-eaec12571173 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.388904] env[63538]: INFO nova.compute.manager [-] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Took 1.57 seconds to deallocate network for instance. [ 704.489720] env[63538]: DEBUG oslo_vmware.api [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100652, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.617817] env[63538]: ERROR nova.scheduler.client.report [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [req-78ccca38-059a-48e1-a270-7cd33eea5d9c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-78ccca38-059a-48e1-a270-7cd33eea5d9c"}]} [ 704.642452] env[63538]: DEBUG nova.scheduler.client.report [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 704.670344] env[63538]: DEBUG nova.scheduler.client.report [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 704.670568] env[63538]: DEBUG nova.compute.provider_tree [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 704.687628] env[63538]: DEBUG nova.scheduler.client.report [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 704.713033] env[63538]: DEBUG nova.scheduler.client.report [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 704.836792] env[63538]: DEBUG nova.network.neutron [-] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.897788] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.992157] env[63538]: DEBUG oslo_vmware.api [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100652, 'name': PowerOnVM_Task, 'duration_secs': 0.617941} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.992634] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 704.992758] env[63538]: DEBUG nova.compute.manager [None req-c308d57c-edff-4888-a1d3-1917e17ed71e tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 704.994051] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70ee0f8-dd44-441f-9894-5025ed8287bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.339858] env[63538]: INFO nova.compute.manager [-] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Took 1.57 seconds to deallocate network for instance. [ 705.403383] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225cb794-49fb-4529-b5fe-5a1629c3b6ba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.412538] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd389b88-c3e8-4497-8596-2d7afb2a45bd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.449775] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28366e3-66c4-42b4-bab0-9c93ea9dc09a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.459085] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131c2845-0990-4ac6-862b-2eb5d3ac80da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.474815] env[63538]: DEBUG nova.compute.provider_tree [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
705.853372] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.880123] env[63538]: DEBUG nova.network.neutron [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Successfully updated port: 719964b3-d739-46b8-ae43-c589419299a1 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 705.948777] env[63538]: DEBUG nova.compute.manager [req-ee48cbd1-7c83-4c1e-aa79-6bd51d4dfde9 req-d22efbc3-31c7-4c61-a959-603ce1e2b05b service nova] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Received event network-vif-deleted-416e5ec4-712e-489c-a33b-2fa922a4bfc9 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 705.967311] env[63538]: DEBUG nova.compute.manager [req-0009c9ca-27cd-470f-bd6d-dd05d8b1c766 req-1dc2ad68-22bb-4d5f-8b46-43de09f8a90b service nova] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Received event network-vif-plugged-719964b3-d739-46b8-ae43-c589419299a1 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 705.967522] env[63538]: DEBUG oslo_concurrency.lockutils [req-0009c9ca-27cd-470f-bd6d-dd05d8b1c766 req-1dc2ad68-22bb-4d5f-8b46-43de09f8a90b service nova] Acquiring lock "1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.967723] env[63538]: DEBUG oslo_concurrency.lockutils [req-0009c9ca-27cd-470f-bd6d-dd05d8b1c766 req-1dc2ad68-22bb-4d5f-8b46-43de09f8a90b service nova] Lock "1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.967881] env[63538]: DEBUG oslo_concurrency.lockutils [req-0009c9ca-27cd-470f-bd6d-dd05d8b1c766 req-1dc2ad68-22bb-4d5f-8b46-43de09f8a90b service nova] Lock "1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.969072] env[63538]: DEBUG nova.compute.manager [req-0009c9ca-27cd-470f-bd6d-dd05d8b1c766 req-1dc2ad68-22bb-4d5f-8b46-43de09f8a90b service nova] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] No waiting events found dispatching network-vif-plugged-719964b3-d739-46b8-ae43-c589419299a1 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 705.969776] env[63538]: WARNING nova.compute.manager [req-0009c9ca-27cd-470f-bd6d-dd05d8b1c766 req-1dc2ad68-22bb-4d5f-8b46-43de09f8a90b service nova] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Received unexpected event network-vif-plugged-719964b3-d739-46b8-ae43-c589419299a1 for instance with vm_state building and task_state spawning. 
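[editor's note] The ERROR/refresh sequence above (the 409 "placement.concurrent_update" at 704.617817, the inventory/aggregate/trait refresh that follows, and the successful set_inventory_for_provider with the provider generation moving from 64 to 65 immediately below) is Placement's optimistic-concurrency protocol: every inventory write carries the resource provider generation the writer last saw, and a stale generation is rejected with 409 so the writer must refresh and retry. A minimal illustrative sketch of that refresh-and-retry loop, written against the Placement REST endpoints with plain requests rather than Nova's actual scheduler report client (PLACEMENT_URL, the token header, and the retry count are placeholders, not values from this log), is:

    # Minimal sketch (not Nova's report client): retry an inventory PUT
    # against the Placement API when the provider generation is stale --
    # the 409 "placement.concurrent_update" -> refresh -> retry cycle
    # visible in the surrounding log entries.
    import requests

    PLACEMENT_URL = "http://placement.example/placement"    # placeholder
    HEADERS = {
        "X-Auth-Token": "TOKEN",                             # placeholder
        "OpenStack-API-Version": "placement 1.26",
    }

    def set_inventory(rp_uuid, inventories, retries=3):
        url = f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories"
        for _ in range(retries):
            # Refresh the provider generation before each attempt.
            current = requests.get(url, headers=HEADERS).json()
            body = {
                "resource_provider_generation":
                    current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code == 200:
                # Success: Placement has bumped the provider generation.
                return resp.json()
            if resp.status_code != 409:
                resp.raise_for_status()
            # 409 concurrent_update: another writer bumped the generation
            # first (as in the log above); loop, refresh, and retry.
        raise RuntimeError("inventory update kept conflicting")

Nova's report client performs the same dance internally, which is why the conflict is followed in the log by _refresh_associations calls (inventories, aggregates, traits) before the update finally lands with the new generation. [end editor's note]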
[ 706.022496] env[63538]: DEBUG nova.scheduler.client.report [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 64 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 706.022746] env[63538]: DEBUG nova.compute.provider_tree [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 64 to 65 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 706.022917] env[63538]: DEBUG nova.compute.provider_tree [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 706.383980] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "refresh_cache-1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.383980] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "refresh_cache-1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.384262] env[63538]: DEBUG nova.network.neutron [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 706.529554] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.300s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.530026] env[63538]: DEBUG nova.compute.manager [None 
req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 706.535019] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.959s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.535019] env[63538]: DEBUG nova.objects.instance [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Lazy-loading 'resources' on Instance uuid bf54098e-91a8-403f-a6fe-b58a62daaadb {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 706.932055] env[63538]: DEBUG nova.network.neutron [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 707.041233] env[63538]: DEBUG nova.compute.utils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 707.052254] env[63538]: DEBUG nova.compute.manager [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Not allocating networking since 'none' was specified. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 707.168684] env[63538]: DEBUG nova.network.neutron [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Updating instance_info_cache with network_info: [{"id": "719964b3-d739-46b8-ae43-c589419299a1", "address": "fa:16:3e:be:ca:ae", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap719964b3-d7", "ovs_interfaceid": "719964b3-d739-46b8-ae43-c589419299a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.556337] env[63538]: DEBUG nova.compute.manager [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 707.655027] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2893937-11d2-402e-b8d4-6be5333752b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.664708] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17178862-ff4e-44a9-9bdd-8c494fc92e1d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.672554] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "refresh_cache-1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.672867] env[63538]: DEBUG nova.compute.manager [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Instance network_info: |[{"id": "719964b3-d739-46b8-ae43-c589419299a1", "address": "fa:16:3e:be:ca:ae", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap719964b3-d7", "ovs_interfaceid": "719964b3-d739-46b8-ae43-c589419299a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 707.699239] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:ca:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '719964b3-d739-46b8-ae43-c589419299a1', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 707.706922] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Creating folder: Project (492427e54e1048f292dab2abdac71af5). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 707.707879] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e64da388-c767-4560-8a1d-9c19c92e8edc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.710272] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56ee299-d4d0-4b37-80b0-ece9e31da494 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.719071] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e0afe3-6c46-48fe-9212-6323a842e0e9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.724509] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Created folder: Project (492427e54e1048f292dab2abdac71af5) in parent group-v992234. [ 707.724727] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Creating folder: Instances. Parent ref: group-v992323. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 707.725441] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eda74da5-c4ed-430a-962c-eabe699d1de6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.737681] env[63538]: DEBUG nova.compute.provider_tree [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.742188] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Created folder: Instances in parent group-v992323. [ 707.742188] env[63538]: DEBUG oslo.service.loopingcall [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 707.742188] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 707.742188] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a43e4c57-2b00-4bb9-aad3-fcf57baa492d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.765180] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 707.765180] env[63538]: value = "task-5100655" [ 707.765180] env[63538]: _type = "Task" [ 707.765180] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.774688] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100655, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.244097] env[63538]: DEBUG nova.scheduler.client.report [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 708.275651] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100655, 'name': CreateVM_Task, 'duration_secs': 0.401767} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.275829] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 708.276563] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.276739] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.277085] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 708.277369] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6edd8269-cba4-47ec-a984-198ad6d84b08 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.283549] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 708.283549] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52472a10-bcac-a164-cf64-f5d9dca746f1" [ 708.283549] env[63538]: _type = "Task" [ 708.283549] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.293987] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52472a10-bcac-a164-cf64-f5d9dca746f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.422628] env[63538]: DEBUG nova.compute.manager [req-d9da931a-562c-42a7-82a5-db959b9ddce6 req-80e6b6d5-2b59-4ea2-bec5-0421f43484cc service nova] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Received event network-changed-719964b3-d739-46b8-ae43-c589419299a1 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 708.422727] env[63538]: DEBUG nova.compute.manager [req-d9da931a-562c-42a7-82a5-db959b9ddce6 req-80e6b6d5-2b59-4ea2-bec5-0421f43484cc service nova] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Refreshing instance network info cache due to event network-changed-719964b3-d739-46b8-ae43-c589419299a1. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 708.422920] env[63538]: DEBUG oslo_concurrency.lockutils [req-d9da931a-562c-42a7-82a5-db959b9ddce6 req-80e6b6d5-2b59-4ea2-bec5-0421f43484cc service nova] Acquiring lock "refresh_cache-1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.423081] env[63538]: DEBUG oslo_concurrency.lockutils [req-d9da931a-562c-42a7-82a5-db959b9ddce6 req-80e6b6d5-2b59-4ea2-bec5-0421f43484cc service nova] Acquired lock "refresh_cache-1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.423267] env[63538]: DEBUG nova.network.neutron [req-d9da931a-562c-42a7-82a5-db959b9ddce6 req-80e6b6d5-2b59-4ea2-bec5-0421f43484cc service nova] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Refreshing network info cache for port 719964b3-d739-46b8-ae43-c589419299a1 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 708.572608] env[63538]: DEBUG nova.compute.manager [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 708.601751] env[63538]: DEBUG nova.virt.hardware [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 708.601907] env[63538]: DEBUG nova.virt.hardware [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 708.602013] env[63538]: DEBUG nova.virt.hardware [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 708.602203] env[63538]: DEBUG nova.virt.hardware [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 708.602349] env[63538]: DEBUG nova.virt.hardware [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 708.602522] env[63538]: DEBUG nova.virt.hardware [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 708.602748] env[63538]: DEBUG nova.virt.hardware [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 708.602925] env[63538]: DEBUG nova.virt.hardware [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 708.603126] env[63538]: DEBUG nova.virt.hardware [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 
tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 708.603332] env[63538]: DEBUG nova.virt.hardware [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 708.603588] env[63538]: DEBUG nova.virt.hardware [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 708.604523] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d8f593-6973-4163-b5ab-a4546dab6730 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.616250] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ea16b4-833c-4248-a54e-5bf61d6da6fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.637917] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 708.644919] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Creating folder: Project (128b2f8fb95d40bda0cf9ea35684928a). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 708.646421] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c226d25-f571-4176-a676-c701274a3770 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.660616] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Created folder: Project (128b2f8fb95d40bda0cf9ea35684928a) in parent group-v992234. [ 708.660616] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Creating folder: Instances. Parent ref: group-v992326. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 708.660799] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c2ad924-2fda-4476-9921-7199308938ef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.672745] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Created folder: Instances in parent group-v992326. [ 708.672980] env[63538]: DEBUG oslo.service.loopingcall [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 708.673323] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 708.673843] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25bc8845-760a-4c81-b3d8-6925e9df6642 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.698016] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 708.698016] env[63538]: value = "task-5100658" [ 708.698016] env[63538]: _type = "Task" [ 708.698016] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.707993] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100658, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.734690] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "6850191a-4190-4795-ae18-830b41a76085" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.734876] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "6850191a-4190-4795-ae18-830b41a76085" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.752457] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.216s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.753254] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.700s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.754867] env[63538]: INFO nova.compute.claims [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 708.799018] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52472a10-bcac-a164-cf64-f5d9dca746f1, 'name': SearchDatastore_Task, 'duration_secs': 0.011937} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.799347] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.799594] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 708.799839] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.799978] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.800171] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.800458] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91e377d8-2655-4841-9b1f-469cc1363611 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.812872] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.812872] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 708.812872] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6593c0c-02c1-4814-a6dd-a65981beb3c2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.819568] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 708.819568] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521b7123-70bd-45f3-13f1-c7169e4c4071" [ 708.819568] env[63538]: _type = "Task" [ 708.819568] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.824628] env[63538]: INFO nova.scheduler.client.report [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Deleted allocations for instance bf54098e-91a8-403f-a6fe-b58a62daaadb [ 708.836857] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521b7123-70bd-45f3-13f1-c7169e4c4071, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.993124] env[63538]: DEBUG oslo_concurrency.lockutils [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Acquiring lock "47500aaa-92fc-454c-badd-d6f8a2203083" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.993124] env[63538]: DEBUG oslo_concurrency.lockutils [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Lock "47500aaa-92fc-454c-badd-d6f8a2203083" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.993124] env[63538]: DEBUG oslo_concurrency.lockutils [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Acquiring lock "47500aaa-92fc-454c-badd-d6f8a2203083-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.993124] env[63538]: DEBUG oslo_concurrency.lockutils [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Lock "47500aaa-92fc-454c-badd-d6f8a2203083-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.993686] env[63538]: DEBUG oslo_concurrency.lockutils [None 
req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Lock "47500aaa-92fc-454c-badd-d6f8a2203083-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.998203] env[63538]: INFO nova.compute.manager [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Terminating instance [ 709.002768] env[63538]: DEBUG nova.compute.manager [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 709.003466] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 709.004129] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65d96767-f0e7-427a-851d-d30eecac9488 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.015129] env[63538]: DEBUG oslo_vmware.api [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Waiting for the task: (returnval){ [ 709.015129] env[63538]: value = "task-5100659" [ 709.015129] env[63538]: _type = "Task" [ 709.015129] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.031378] env[63538]: DEBUG oslo_vmware.api [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100659, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.219489] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100658, 'name': CreateVM_Task, 'duration_secs': 0.328076} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.219673] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 709.220203] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.220573] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.220774] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 709.221125] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70b5c7b8-5a8e-49a6-b6a4-123d6f8fa0b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.229289] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 709.229289] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d84aa1-acc2-7372-e5ca-31b73cd84913" [ 709.229289] env[63538]: _type = "Task" [ 709.229289] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.245200] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d84aa1-acc2-7372-e5ca-31b73cd84913, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.334700] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521b7123-70bd-45f3-13f1-c7169e4c4071, 'name': SearchDatastore_Task, 'duration_secs': 0.011821} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.335921] env[63538]: DEBUG nova.network.neutron [req-d9da931a-562c-42a7-82a5-db959b9ddce6 req-80e6b6d5-2b59-4ea2-bec5-0421f43484cc service nova] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Updated VIF entry in instance network info cache for port 719964b3-d739-46b8-ae43-c589419299a1. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 709.336131] env[63538]: DEBUG nova.network.neutron [req-d9da931a-562c-42a7-82a5-db959b9ddce6 req-80e6b6d5-2b59-4ea2-bec5-0421f43484cc service nova] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Updating instance_info_cache with network_info: [{"id": "719964b3-d739-46b8-ae43-c589419299a1", "address": "fa:16:3e:be:ca:ae", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap719964b3-d7", "ovs_interfaceid": "719964b3-d739-46b8-ae43-c589419299a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.340131] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86eb3de1-e82f-4b7d-a1d9-2ac0576a848e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.342933] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d760e8ec-8f61-4ec3-8759-06d94fd0b2b8 tempest-VolumesAssistedSnapshotsTest-2067238123 tempest-VolumesAssistedSnapshotsTest-2067238123-project-member] Lock "bf54098e-91a8-403f-a6fe-b58a62daaadb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.713s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.348818] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 709.348818] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52349c5f-0293-af37-684a-a5067d8e7748" [ 709.348818] env[63538]: _type = "Task" [ 709.348818] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.358872] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52349c5f-0293-af37-684a-a5067d8e7748, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.528998] env[63538]: DEBUG oslo_vmware.api [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100659, 'name': PowerOffVM_Task, 'duration_secs': 0.344822} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.528998] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 709.528998] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Volume detach. Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 709.529234] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992278', 'volume_id': '9cb99482-8e0b-49d1-9249-8f411b389c09', 'name': 'volume-9cb99482-8e0b-49d1-9249-8f411b389c09', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '47500aaa-92fc-454c-badd-d6f8a2203083', 'attached_at': '', 'detached_at': '', 'volume_id': '9cb99482-8e0b-49d1-9249-8f411b389c09', 'serial': '9cb99482-8e0b-49d1-9249-8f411b389c09'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 709.531552] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1bc437d-d0cf-4f0b-ab01-84fe6f5712e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.556174] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac129c3-96b5-462e-8d75-1fdde92cdcef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.566890] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7515e138-f919-419d-af58-4647bcd24d72 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.595717] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9a5bbb-228b-4be4-bc0e-62f7136227de {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.614841] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] The volume has not been displaced from its original location: [datastore2] 
volume-9cb99482-8e0b-49d1-9249-8f411b389c09/volume-9cb99482-8e0b-49d1-9249-8f411b389c09.vmdk. No consolidation needed. {{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 709.621640] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Reconfiguring VM instance instance-00000019 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 709.623642] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a34356e-f9c8-42ea-969c-f65f3eb06941 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.640907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Acquiring lock "5bf7ed57-62d5-4abc-96d8-78b979baed92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.641195] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Lock "5bf7ed57-62d5-4abc-96d8-78b979baed92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.648999] env[63538]: DEBUG oslo_vmware.api [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Waiting for the task: (returnval){ [ 709.648999] env[63538]: value = "task-5100660" [ 709.648999] env[63538]: _type = "Task" [ 709.648999] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.659442] env[63538]: DEBUG oslo_vmware.api [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100660, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.743275] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d84aa1-acc2-7372-e5ca-31b73cd84913, 'name': SearchDatastore_Task, 'duration_secs': 0.039275} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.743612] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.743856] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 709.744085] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.839515] env[63538]: DEBUG oslo_concurrency.lockutils [req-d9da931a-562c-42a7-82a5-db959b9ddce6 req-80e6b6d5-2b59-4ea2-bec5-0421f43484cc service nova] Releasing lock "refresh_cache-1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.861600] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52349c5f-0293-af37-684a-a5067d8e7748, 'name': SearchDatastore_Task, 'duration_secs': 0.025836} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.861866] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.862249] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7/1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 709.862580] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.862780] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 709.862996] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69d58412-b472-4eca-a8d6-a0e90735028b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.865798] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2eeeda2d-2294-4b34-af3d-9c3950c655cf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.877729] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 709.877729] env[63538]: value = "task-5100661" [ 709.877729] env[63538]: _type = "Task" [ 709.877729] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.880125] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 709.880125] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 709.884347] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aa5ab99-0a8b-4a87-8cf4-e32ebf33261f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.901417] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100661, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.902146] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 709.902146] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d7baca-6ca6-b237-e376-9b6fc6db2257" [ 709.902146] env[63538]: _type = "Task" [ 709.902146] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.922793] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d7baca-6ca6-b237-e376-9b6fc6db2257, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.166447] env[63538]: DEBUG oslo_vmware.api [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100660, 'name': ReconfigVM_Task, 'duration_secs': 0.279298} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.166616] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Reconfigured VM instance instance-00000019 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 710.171945] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5d0a0d0-d5d0-46e0-808b-e3ec088a90a2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.192310] env[63538]: DEBUG oslo_vmware.api [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Waiting for the task: (returnval){ [ 710.192310] env[63538]: value = "task-5100662" [ 710.192310] env[63538]: _type = "Task" [ 710.192310] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.202902] env[63538]: DEBUG oslo_vmware.api [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100662, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.392622] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100661, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.416324] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d7baca-6ca6-b237-e376-9b6fc6db2257, 'name': SearchDatastore_Task, 'duration_secs': 0.020959} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.421402] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c7b2d3b-48a5-4442-a634-5839c6d1646c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.428857] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 710.428857] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520e53a5-1fd2-2801-2ad2-8fe5d0cce57e" [ 710.428857] env[63538]: _type = "Task" [ 710.428857] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.440165] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520e53a5-1fd2-2801-2ad2-8fe5d0cce57e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.553669] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a26cb33-21d0-4423-91c7-22cdf02a258f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.566377] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e40923-4d8b-4051-9f3d-2beb296ce765 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.599584] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f7e609-c061-4a70-8443-5d8ab01268bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.611123] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b847010-cc6f-4435-bb42-e97f422eeb3f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.630798] env[63538]: DEBUG nova.compute.provider_tree [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.707906] env[63538]: DEBUG oslo_vmware.api [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100662, 'name': ReconfigVM_Task, 'duration_secs': 0.154251} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.708061] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992278', 'volume_id': '9cb99482-8e0b-49d1-9249-8f411b389c09', 'name': 'volume-9cb99482-8e0b-49d1-9249-8f411b389c09', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '47500aaa-92fc-454c-badd-d6f8a2203083', 'attached_at': '', 'detached_at': '', 'volume_id': '9cb99482-8e0b-49d1-9249-8f411b389c09', 'serial': '9cb99482-8e0b-49d1-9249-8f411b389c09'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 710.708246] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 710.709135] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc24e16-4217-4d37-991a-f32ffe5398f2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.718167] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 710.718418] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee6dc4f8-6538-40fe-bf29-755fba2d7987 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.816145] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 710.816517] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 710.816625] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Deleting the datastore file [datastore2] 47500aaa-92fc-454c-badd-d6f8a2203083 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 710.816905] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a2e16198-ed65-4af6-8333-f06a3ffeb6e4 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.825898] env[63538]: DEBUG oslo_vmware.api [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Waiting for the task: (returnval){ [ 710.825898] env[63538]: value = "task-5100664" [ 710.825898] env[63538]: _type = "Task" [ 710.825898] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.836448] env[63538]: DEBUG oslo_vmware.api [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100664, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.890038] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100661, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.929167} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.890437] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7/1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 710.890728] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 710.891118] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-573dac0a-fc3f-42ca-956f-92ccb523d283 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.899480] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 710.899480] env[63538]: value = "task-5100665" [ 710.899480] env[63538]: _type = "Task" [ 710.899480] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.910100] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100665, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.945613] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520e53a5-1fd2-2801-2ad2-8fe5d0cce57e, 'name': SearchDatastore_Task, 'duration_secs': 0.055155} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.945613] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.945613] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1/fd650fdc-6b49-4051-8267-bbd1f0cb86f1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 710.945613] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-adc85457-d382-412c-b1ce-b358136c164a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.952383] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 710.952383] env[63538]: value = "task-5100666" [ 710.952383] env[63538]: _type = "Task" [ 710.952383] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.961528] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100666, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.135186] env[63538]: DEBUG nova.scheduler.client.report [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 711.344822] env[63538]: DEBUG oslo_vmware.api [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Task: {'id': task-5100664, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126639} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.345219] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 711.345477] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 711.345715] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 711.345953] env[63538]: INFO nova.compute.manager [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Took 2.34 seconds to destroy the instance on the hypervisor. [ 711.346268] env[63538]: DEBUG oslo.service.loopingcall [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 711.346526] env[63538]: DEBUG nova.compute.manager [-] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 711.346703] env[63538]: DEBUG nova.network.neutron [-] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 711.410969] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100665, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071145} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.411694] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 711.412555] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c2f093-b229-4fb3-9edd-250edb76b712 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.443635] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7/1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 711.444025] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a29a966-a395-4c37-bda6-fc9f05fe43aa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.473770] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100666, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.475685] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 711.475685] env[63538]: value = "task-5100667" [ 711.475685] env[63538]: _type = "Task" [ 711.475685] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.487799] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100667, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.644954] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.889s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.644954] env[63538]: DEBUG nova.compute.manager [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 711.646925] env[63538]: DEBUG oslo_concurrency.lockutils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.768s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.648721] env[63538]: INFO nova.compute.claims [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 711.975808] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100666, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.988201] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100667, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.154648] env[63538]: DEBUG nova.compute.utils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 712.158757] env[63538]: DEBUG nova.compute.manager [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 712.158961] env[63538]: DEBUG nova.network.neutron [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 712.247793] env[63538]: DEBUG nova.policy [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '311a3712ead145899a7768b5297f056a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5784127fe9d4eefaa1f55f0eacdb91d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 712.363854] env[63538]: DEBUG nova.compute.manager [req-ea77467f-922b-4433-abd7-812f2ccc6a89 req-ea8972d6-e123-40d6-97c5-f800aa2a37ca service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Received event network-vif-deleted-845bbf0e-88f5-474e-b875-0a12bfaebd27 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 712.364067] env[63538]: INFO nova.compute.manager [req-ea77467f-922b-4433-abd7-812f2ccc6a89 req-ea8972d6-e123-40d6-97c5-f800aa2a37ca service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Neutron deleted interface 845bbf0e-88f5-474e-b875-0a12bfaebd27; detaching it from the instance and deleting it from the info cache [ 712.364237] env[63538]: DEBUG nova.network.neutron [req-ea77467f-922b-4433-abd7-812f2ccc6a89 req-ea8972d6-e123-40d6-97c5-f800aa2a37ca service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.474625] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100666, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.123199} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.474906] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1/fd650fdc-6b49-4051-8267-bbd1f0cb86f1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 712.475142] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 712.475382] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-74305c96-3667-4197-8739-14f718de7d3a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.492159] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100667, 'name': ReconfigVM_Task, 'duration_secs': 0.538351} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.492464] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 712.492464] env[63538]: value = "task-5100668" [ 712.492464] env[63538]: _type = "Task" [ 712.492464] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.492708] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7/1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 712.493481] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eac00d3d-9eb1-40dd-8db0-4dd795d0b4f2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.504805] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100668, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.506218] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 712.506218] env[63538]: value = "task-5100669" [ 712.506218] env[63538]: _type = "Task" [ 712.506218] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.514959] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100669, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.626176] env[63538]: DEBUG nova.network.neutron [-] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.658269] env[63538]: DEBUG nova.compute.manager [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 712.802838] env[63538]: DEBUG nova.network.neutron [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Successfully created port: 2357c1a1-6201-44ae-9461-80b6269920a2 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.872606] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d9be26c-dd05-4e25-be16-c821beb019d1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.888680] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363e6884-7636-499d-9047-3307e2c37e9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.932682] env[63538]: DEBUG nova.compute.manager [req-ea77467f-922b-4433-abd7-812f2ccc6a89 req-ea8972d6-e123-40d6-97c5-f800aa2a37ca service nova] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Detach interface failed, port_id=845bbf0e-88f5-474e-b875-0a12bfaebd27, reason: Instance 47500aaa-92fc-454c-badd-d6f8a2203083 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 713.002972] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100668, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070602} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.006019] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 713.007129] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae95cbb9-c936-45be-8e6e-0643129f2589 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.034734] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1/fd650fdc-6b49-4051-8267-bbd1f0cb86f1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 713.043348] env[63538]: DEBUG oslo_concurrency.lockutils [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "99de5226-a27c-47c5-90fa-5f0c7204df1c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.043626] env[63538]: DEBUG oslo_concurrency.lockutils [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "99de5226-a27c-47c5-90fa-5f0c7204df1c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.043838] env[63538]: DEBUG oslo_concurrency.lockutils [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "99de5226-a27c-47c5-90fa-5f0c7204df1c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.044037] env[63538]: DEBUG oslo_concurrency.lockutils [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "99de5226-a27c-47c5-90fa-5f0c7204df1c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.044299] env[63538]: DEBUG oslo_concurrency.lockutils [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "99de5226-a27c-47c5-90fa-5f0c7204df1c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.046248] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60905bff-2ac0-492c-8a00-a7e7eeeaa753 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.061254] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100669, 'name': Rename_Task, 'duration_secs': 0.169629} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.062780] env[63538]: INFO nova.compute.manager [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Terminating instance [ 713.064215] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 713.065094] env[63538]: DEBUG nova.compute.manager [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 713.065319] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 713.066083] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-847495e8-6a65-40ab-a4bd-68675a80413d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.068528] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953e2e51-1132-410f-b700-37480fd31d00 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.075774] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 713.075774] env[63538]: value = "task-5100670" [ 713.075774] env[63538]: _type = "Task" [ 713.075774] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.084507] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 713.084507] env[63538]: value = "task-5100671" [ 713.084507] env[63538]: _type = "Task" [ 713.084507] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.084931] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 713.088416] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae6bf596-3efa-460c-b0ff-029376bc4b41 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.091226] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100670, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.104372] env[63538]: DEBUG oslo_vmware.api [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 713.104372] env[63538]: value = "task-5100672" [ 713.104372] env[63538]: _type = "Task" [ 713.104372] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.105225] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100671, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.116344] env[63538]: DEBUG oslo_vmware.api [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100672, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.130384] env[63538]: INFO nova.compute.manager [-] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Took 1.78 seconds to deallocate network for instance. 
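The entries above all follow the same task lifecycle: a task reference is returned (value = "task-NNNNNNN"), its progress is polled ("progress is N%"), and completion is reported with a duration_secs field. A minimal, hypothetical Python sketch for pulling that lifecycle out of a log in this format — the regexes, field names and file name below are assumptions based on the lines shown, not part of Nova or oslo.vmware:

    import re
    from collections import defaultdict

    # Matches the polling and completion lines shown above, e.g.
    #   Task: {'id': task-5100672, 'name': PowerOffVM_Task} progress is 0%.
    #   Task: {'id': task-5100672, 'name': PowerOffVM_Task, 'duration_secs': 0.221598} completed successfully.
    POLL = re.compile(r"Task: \{'id': (task-\d+), 'name': (\w+)[^}]*\} progress is (\d+)%")
    DONE = re.compile(r"Task: \{'id': (task-\d+), 'name': (\w+), 'duration_secs': ([\d.]+)\} completed successfully")

    def summarize_tasks(path):
        """Collect per-task progress samples and completion durations from a nova-compute log."""
        progress = defaultdict(list)
        durations = {}
        with open(path) as fh:
            for line in fh:
                m = POLL.search(line)
                if m:
                    progress[m.group(1)].append((m.group(2), int(m.group(3))))
                m = DONE.search(line)
                if m:
                    durations[m.group(1)] = (m.group(2), float(m.group(3)))
        return progress, durations

    if __name__ == "__main__":
        prog, done = summarize_tasks("nova-compute.log")  # hypothetical file name
        for task_id, (name, secs) in sorted(done.items()):
            print(f"{task_id} {name}: {secs:.3f}s, {len(prog.get(task_id, []))} poll(s)")

Run against this section, such a summary would show, for example, task-5100672 (PowerOffVM_Task) completing in about 0.22s after a single 0% poll.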
[ 713.401587] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255905d8-a691-4bcb-8879-69b90dc5aeb2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.409282] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc79618f-116b-4427-af9f-240509ab1b55 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.443455] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04a276d-ea51-47be-b5a3-38361a1623b8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.450226] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f231b127-1c76-45d0-bf89-2d1e4a516f44 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.464758] env[63538]: DEBUG nova.compute.provider_tree [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 713.587513] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100670, 'name': ReconfigVM_Task, 'duration_secs': 0.326959} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.587808] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Reconfigured VM instance instance-00000025 to attach disk [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1/fd650fdc-6b49-4051-8267-bbd1f0cb86f1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 713.588525] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b20bf4b5-e34a-44fe-863d-8049ed472fa6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.601046] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100671, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.604119] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 713.604119] env[63538]: value = "task-5100673" [ 713.604119] env[63538]: _type = "Task" [ 713.604119] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.615103] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100673, 'name': Rename_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.618228] env[63538]: DEBUG oslo_vmware.api [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100672, 'name': PowerOffVM_Task, 'duration_secs': 0.221598} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.618490] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 713.618916] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 713.618916] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02555dac-cc92-4fdc-89fb-3cd6cef44ad0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.671523] env[63538]: DEBUG nova.compute.manager [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 713.688981] env[63538]: INFO nova.compute.manager [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Took 0.56 seconds to detach 1 volumes for instance. 
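For instance 99de5226-a27c-47c5-90fa-5f0c7204df1c, the req-194beceb-c314-43e6-b851-d019672e47b4 entries trace a fixed teardown order: take the per-instance lock, power the VM off, unregister it, delete its datastore directory, then deallocate networking. A schematic sketch of that ordering, assuming hypothetical helper callables standing in for the vm_util/vmops/ds_util/neutron operations named in the log — this is not Nova's actual implementation:

    from oslo_concurrency import lockutils

    def terminate_instance(session, instance, power_off_vm, unregister_vm,
                           delete_datastore_dir, deallocate_network):
        """Tear an instance down in the order the log entries above show."""
        # Serialize per instance, mirroring the 'Acquiring lock "<uuid>" by
        # ...do_terminate_instance' entries.
        @lockutils.synchronized(instance["uuid"])
        def _do_terminate():
            power_off_vm(session, instance)          # PowerOffVM_Task
            unregister_vm(session, instance)         # VirtualMachine.UnregisterVM
            delete_datastore_dir(session, instance)  # FileManager.DeleteDatastoreFile_Task
            deallocate_network(instance)             # deallocate_for_instance()
        _do_terminate()

The ordering matters: the datastore files can only be deleted safely once the VM is powered off and unregistered, and network deallocation comes last so Neutron events (such as the network-vif-deleted seen later) arrive after the hypervisor-side cleanup.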
[ 713.691486] env[63538]: DEBUG nova.compute.manager [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Deleting volume: 9cb99482-8e0b-49d1-9249-8f411b389c09 {{(pid=63538) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 713.705565] env[63538]: DEBUG nova.virt.hardware [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 713.705831] env[63538]: DEBUG nova.virt.hardware [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 713.705988] env[63538]: DEBUG nova.virt.hardware [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 713.706202] env[63538]: DEBUG nova.virt.hardware [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 713.706364] env[63538]: DEBUG nova.virt.hardware [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 713.706510] env[63538]: DEBUG nova.virt.hardware [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 713.706718] env[63538]: DEBUG nova.virt.hardware [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 713.706871] env[63538]: DEBUG nova.virt.hardware [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 713.707051] env[63538]: DEBUG nova.virt.hardware [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 713.707217] env[63538]: DEBUG nova.virt.hardware [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 713.707440] env[63538]: DEBUG nova.virt.hardware [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 713.708256] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf78b5b-2bb5-4051-bf82-9da797d97fb3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.717887] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34307569-578f-4a13-8ab6-69720b90c325 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.775635] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 713.775913] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 713.776165] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Deleting the datastore file [datastore1] 99de5226-a27c-47c5-90fa-5f0c7204df1c {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 713.776747] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3433866f-a779-4c33-9e0e-37f0516e680e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.785185] env[63538]: DEBUG oslo_vmware.api [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 
tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 713.785185] env[63538]: value = "task-5100675" [ 713.785185] env[63538]: _type = "Task" [ 713.785185] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.797755] env[63538]: DEBUG oslo_vmware.api [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100675, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.996587] env[63538]: ERROR nova.scheduler.client.report [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [req-7cf9ec52-b209-49d8-a174-bf403d5f25ec] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7cf9ec52-b209-49d8-a174-bf403d5f25ec"}]} [ 714.015798] env[63538]: DEBUG nova.scheduler.client.report [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 714.032704] env[63538]: DEBUG nova.scheduler.client.report [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 714.032704] env[63538]: DEBUG nova.compute.provider_tree [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 714.048267] env[63538]: DEBUG nova.scheduler.client.report [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 714.072780] env[63538]: DEBUG nova.scheduler.client.report [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 714.101447] env[63538]: DEBUG oslo_vmware.api [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100671, 'name': PowerOnVM_Task, 'duration_secs': 0.609583} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.101726] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 714.101922] env[63538]: INFO nova.compute.manager [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Took 9.84 seconds to spawn the instance on the hypervisor. [ 714.102112] env[63538]: DEBUG nova.compute.manager [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 714.102914] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a3c55a-91c2-4cce-99c3-7785c53eb2b2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.123518] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100673, 'name': Rename_Task, 'duration_secs': 0.14868} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.124401] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 714.124401] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d28d1236-166c-4b0f-b586-9a9341909d23 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.136095] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 714.136095] env[63538]: value = "task-5100677" [ 714.136095] env[63538]: _type = "Task" [ 714.136095] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.145242] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100677, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.252517] env[63538]: DEBUG oslo_concurrency.lockutils [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.300093] env[63538]: DEBUG oslo_vmware.api [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163558} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.300374] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 714.300563] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 714.300741] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 714.300952] env[63538]: INFO nova.compute.manager [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Took 1.24 seconds to destroy the instance on the hypervisor. [ 714.301409] env[63538]: DEBUG oslo.service.loopingcall [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 714.301655] env[63538]: DEBUG nova.compute.manager [-] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 714.301756] env[63538]: DEBUG nova.network.neutron [-] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 714.640742] env[63538]: INFO nova.compute.manager [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Took 45.57 seconds to build instance. [ 714.654604] env[63538]: DEBUG oslo_vmware.api [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100677, 'name': PowerOnVM_Task, 'duration_secs': 0.477709} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.655619] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 714.655716] env[63538]: INFO nova.compute.manager [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Took 6.08 seconds to spawn the instance on the hypervisor. [ 714.657102] env[63538]: DEBUG nova.compute.manager [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 714.657102] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8cb3668-4ec0-4780-9cd2-573cb5e9710d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.669506] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d6db69-f8c2-43dd-a9b1-4c609bd35875 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.677959] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31778f71-eb14-4463-95ed-8122aa33e75f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.716219] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9144a822-9a43-45af-a4c0-75bda896e6a3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.725196] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2311cbe8-a99f-4a71-b5df-f9e24bd458cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.741320] env[63538]: DEBUG nova.compute.provider_tree [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.775641] env[63538]: DEBUG nova.compute.manager [req-75b055a5-1943-4d61-a311-a293aa24e86e req-1890f8f7-90a6-4bd9-8e60-4f5d1ce3a38c service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Received event network-vif-plugged-2357c1a1-6201-44ae-9461-80b6269920a2 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 714.775740] env[63538]: DEBUG oslo_concurrency.lockutils [req-75b055a5-1943-4d61-a311-a293aa24e86e req-1890f8f7-90a6-4bd9-8e60-4f5d1ce3a38c service nova] Acquiring lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.776648] env[63538]: DEBUG oslo_concurrency.lockutils [req-75b055a5-1943-4d61-a311-a293aa24e86e req-1890f8f7-90a6-4bd9-8e60-4f5d1ce3a38c service nova] Lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.776648] env[63538]: DEBUG oslo_concurrency.lockutils [req-75b055a5-1943-4d61-a311-a293aa24e86e req-1890f8f7-90a6-4bd9-8e60-4f5d1ce3a38c service nova] Lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.776828] env[63538]: DEBUG nova.compute.manager [req-75b055a5-1943-4d61-a311-a293aa24e86e req-1890f8f7-90a6-4bd9-8e60-4f5d1ce3a38c service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] No waiting events found dispatching network-vif-plugged-2357c1a1-6201-44ae-9461-80b6269920a2 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 714.776998] env[63538]: WARNING nova.compute.manager [req-75b055a5-1943-4d61-a311-a293aa24e86e req-1890f8f7-90a6-4bd9-8e60-4f5d1ce3a38c service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Received unexpected event network-vif-plugged-2357c1a1-6201-44ae-9461-80b6269920a2 for instance with vm_state building and task_state spawning. [ 714.808222] env[63538]: DEBUG nova.compute.manager [req-45e7a1d5-4f63-4468-8bb0-a0f15197f0d8 req-ecfc3070-8364-49b4-b842-9ea06b6abfb9 service nova] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Received event network-vif-deleted-ed12b7c5-6f91-4211-b601-6494ca052b0b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 714.808416] env[63538]: INFO nova.compute.manager [req-45e7a1d5-4f63-4468-8bb0-a0f15197f0d8 req-ecfc3070-8364-49b4-b842-9ea06b6abfb9 service nova] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Neutron deleted interface ed12b7c5-6f91-4211-b601-6494ca052b0b; detaching it from the instance and deleting it from the info cache [ 714.809800] env[63538]: DEBUG nova.network.neutron [req-45e7a1d5-4f63-4468-8bb0-a0f15197f0d8 req-ecfc3070-8364-49b4-b842-9ea06b6abfb9 service nova] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.865582] env[63538]: DEBUG nova.network.neutron [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Successfully updated port: 2357c1a1-6201-44ae-9461-80b6269920a2 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 715.147767] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f77e73cb-7657-44d6-bc88-7a515b77efb5 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.299s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.149025] 
env[63538]: DEBUG nova.network.neutron [-] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.182863] env[63538]: INFO nova.compute.manager [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Took 45.34 seconds to build instance. [ 715.244443] env[63538]: DEBUG nova.scheduler.client.report [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 715.311603] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4324c730-f31b-4538-94ae-26f77252c376 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.322077] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04cbefb-99de-4980-9e2a-b44e72a94964 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.359310] env[63538]: DEBUG nova.compute.manager [req-45e7a1d5-4f63-4468-8bb0-a0f15197f0d8 req-ecfc3070-8364-49b4-b842-9ea06b6abfb9 service nova] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Detach interface failed, port_id=ed12b7c5-6f91-4211-b601-6494ca052b0b, reason: Instance 99de5226-a27c-47c5-90fa-5f0c7204df1c could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 715.370980] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.371056] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquired lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.371201] env[63538]: DEBUG nova.network.neutron [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 715.652052] env[63538]: INFO nova.compute.manager [-] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Took 1.35 seconds to deallocate network for instance. [ 715.652987] env[63538]: DEBUG nova.compute.manager [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 715.685754] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13dfd116-c1f1-4fc1-9a85-a81fd9531130 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Lock "fd650fdc-6b49-4051-8267-bbd1f0cb86f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.540s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.755250] env[63538]: DEBUG oslo_concurrency.lockutils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.109s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.755852] env[63538]: DEBUG nova.compute.manager [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 715.759954] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 40.469s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.773938] env[63538]: INFO nova.compute.manager [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Rebuilding instance [ 715.841760] env[63538]: DEBUG nova.compute.manager [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 715.842680] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72b37e3-6e6c-4880-ab72-bd4f5f32b255 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.917849] env[63538]: DEBUG nova.network.neutron [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 715.925751] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.925858] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.926144] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.926464] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.926788] env[63538]: DEBUG 
oslo_concurrency.lockutils [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.930386] env[63538]: INFO nova.compute.manager [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Terminating instance [ 715.932615] env[63538]: DEBUG nova.compute.manager [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 715.932822] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 715.933711] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b848cf6a-ef11-46f0-a84d-a10abe1473d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.942051] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 715.942307] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db74b436-8055-4b95-92d8-8dc2a5583785 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.949649] env[63538]: DEBUG oslo_vmware.api [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 715.949649] env[63538]: value = "task-5100678" [ 715.949649] env[63538]: _type = "Task" [ 715.949649] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.963263] env[63538]: DEBUG oslo_vmware.api [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100678, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.123891] env[63538]: DEBUG nova.network.neutron [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Updating instance_info_cache with network_info: [{"id": "2357c1a1-6201-44ae-9461-80b6269920a2", "address": "fa:16:3e:c1:2d:ff", "network": {"id": "ad5bb52a-b208-49a5-986c-0bfcdffe7d94", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-907734579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5784127fe9d4eefaa1f55f0eacdb91d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2357c1a1-62", "ovs_interfaceid": "2357c1a1-6201-44ae-9461-80b6269920a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.165023] env[63538]: DEBUG oslo_concurrency.lockutils [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.181996] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.187839] env[63538]: DEBUG nova.compute.manager [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 716.261313] env[63538]: DEBUG nova.compute.utils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 716.262800] env[63538]: DEBUG nova.compute.manager [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Not allocating networking since 'none' was specified. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 716.358248] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 716.358331] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b92bd03-aafd-46b1-a1ea-d71bb59c6235 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.367059] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 716.367059] env[63538]: value = "task-5100679" [ 716.367059] env[63538]: _type = "Task" [ 716.367059] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.375412] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100679, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.461513] env[63538]: DEBUG oslo_vmware.api [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100678, 'name': PowerOffVM_Task, 'duration_secs': 0.205635} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.461766] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 716.461933] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 716.462193] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f162d6ae-9016-4729-bcee-e0a439c0e8bd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.527147] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 716.527234] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 716.527568] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleting the datastore file [datastore1] 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 716.527649] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5949f230-7f6f-4c95-9ef5-57f419cb4876 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.537253] env[63538]: DEBUG oslo_vmware.api [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 716.537253] env[63538]: value = "task-5100681" [ 716.537253] env[63538]: _type = "Task" [ 716.537253] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.545934] env[63538]: DEBUG oslo_vmware.api [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100681, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.627720] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Releasing lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.628128] env[63538]: DEBUG nova.compute.manager [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Instance network_info: |[{"id": "2357c1a1-6201-44ae-9461-80b6269920a2", "address": "fa:16:3e:c1:2d:ff", "network": {"id": "ad5bb52a-b208-49a5-986c-0bfcdffe7d94", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-907734579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5784127fe9d4eefaa1f55f0eacdb91d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2357c1a1-62", "ovs_interfaceid": "2357c1a1-6201-44ae-9461-80b6269920a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 716.628562] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:2d:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00a15667-7ca5-4dc9-be92-164750d87988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2357c1a1-6201-44ae-9461-80b6269920a2', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 716.637198] env[63538]: DEBUG oslo.service.loopingcall [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 716.637479] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 716.637780] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4c3fe19-bfe6-49b5-b61d-75be8fbff916 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.659583] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 716.659583] env[63538]: value = "task-5100682" [ 716.659583] env[63538]: _type = "Task" [ 716.659583] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.668956] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100682, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.710865] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.776986] env[63538]: DEBUG nova.compute.manager [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 716.814221] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance c8a02fa6-5232-4dde-b6dd-0da1089b6bbf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.814221] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 8fb62f47-cbf2-4b46-bc33-845e832f9ef0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.814537] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance f9fa5578-acf3-416f-9cb0-8ceb00e5132d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.814587] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 36d40b69-fae7-4867-afa1-4befdc96bde0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 716.814747] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance ede967c0-ec3a-4f26-8290-0ee36890cd75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.814903] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance dbf48807-08a7-46d1-8454-42437a9f87c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.815129] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 2e1b0bc7-3909-48e2-b9be-26822a57ee67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.815270] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance bd222761-92aa-4f2c-a752-ead9c498ee7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.815539] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 47500aaa-92fc-454c-badd-d6f8a2203083 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 716.815726] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance b5593b74-fe89-43f5-a8c6-e73159b4efac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.815893] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance ee9fe572-7a17-46db-8330-4b6f632c6b2c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 716.816083] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 5421e135-9581-4f81-aa8a-2a604887a1df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.816540] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 65fc18ff-8901-40d2-8a5b-640eb9768240 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.816540] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 43729260-d138-4e62-9cc5-4db3ca39f5d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.816727] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance a7bb1869-5553-40d8-9c0b-366ccdef5fae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.816899] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 716.817112] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 99de5226-a27c-47c5-90fa-5f0c7204df1c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 716.817310] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 61068d41-5f5d-4ee5-b546-71da13eff93d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 716.817475] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.817621] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance fd650fdc-6b49-4051-8267-bbd1f0cb86f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.817766] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 6f29f063-ddb5-491a-a1a0-7c9ed65a1718 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 716.878364] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100679, 'name': PowerOffVM_Task, 'duration_secs': 0.132396} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.878901] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 716.878947] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 716.880366] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a5ec3b-fc48-411d-aae4-4466b2ac5b59 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.889959] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 716.890108] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c2ebc19-2bcd-414e-bbba-f52e12343bc5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.921128] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 716.921332] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 716.921566] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Deleting the datastore file [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 716.921863] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea3f0c60-181c-4d5f-b3d5-cd9586eddbdf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.930859] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 
tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 716.930859] env[63538]: value = "task-5100684" [ 716.930859] env[63538]: _type = "Task" [ 716.930859] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.939699] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100684, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.980301] env[63538]: DEBUG nova.compute.manager [req-c5a9dd83-2c72-45cf-a878-43a21a95d172 req-2d674558-8691-4573-b6d1-bc33ca90be86 service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Received event network-changed-2357c1a1-6201-44ae-9461-80b6269920a2 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 716.980301] env[63538]: DEBUG nova.compute.manager [req-c5a9dd83-2c72-45cf-a878-43a21a95d172 req-2d674558-8691-4573-b6d1-bc33ca90be86 service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Refreshing instance network info cache due to event network-changed-2357c1a1-6201-44ae-9461-80b6269920a2. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 716.980599] env[63538]: DEBUG oslo_concurrency.lockutils [req-c5a9dd83-2c72-45cf-a878-43a21a95d172 req-2d674558-8691-4573-b6d1-bc33ca90be86 service nova] Acquiring lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.980599] env[63538]: DEBUG oslo_concurrency.lockutils [req-c5a9dd83-2c72-45cf-a878-43a21a95d172 req-2d674558-8691-4573-b6d1-bc33ca90be86 service nova] Acquired lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.980755] env[63538]: DEBUG nova.network.neutron [req-c5a9dd83-2c72-45cf-a878-43a21a95d172 req-2d674558-8691-4573-b6d1-bc33ca90be86 service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Refreshing network info cache for port 2357c1a1-6201-44ae-9461-80b6269920a2 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 717.047370] env[63538]: DEBUG oslo_vmware.api [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100681, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240187} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.047705] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 717.047901] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 717.048095] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 717.048272] env[63538]: INFO nova.compute.manager [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 717.048553] env[63538]: DEBUG oslo.service.loopingcall [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 717.049140] env[63538]: DEBUG nova.compute.manager [-] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 717.049246] env[63538]: DEBUG nova.network.neutron [-] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 717.172207] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100682, 'name': CreateVM_Task, 'duration_secs': 0.496141} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.174729] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 717.175499] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.175650] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.176016] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 717.176601] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cdcae88-8be5-4335-8b32-aa8adb1ab354 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.182208] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 717.182208] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529df5ad-77c9-0d3f-5c57-98cc60bf6414" [ 717.182208] env[63538]: _type = "Task" [ 717.182208] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.191436] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529df5ad-77c9-0d3f-5c57-98cc60bf6414, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.321622] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 080b11d7-a756-45a0-81d5-b5fcc2662ac9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 717.321622] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance bb56950a-3e25-4fb9-9f84-f735e26adc42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 717.443598] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100684, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108808} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.443865] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 717.444097] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 717.444546] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 717.694709] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529df5ad-77c9-0d3f-5c57-98cc60bf6414, 'name': SearchDatastore_Task, 'duration_secs': 0.010087} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.698089] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.698089] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 717.698089] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.698089] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.698346] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 717.698346] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-063147d1-2a95-4445-9031-ce80c0a2eee7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.708144] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 717.708634] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 717.709498] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93385d21-cfbd-4c2a-af09-46bd93decd0a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.718225] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 717.718225] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5262d54d-930f-6d1d-ad8d-8641875bd8e3" [ 717.718225] env[63538]: _type = "Task" [ 717.718225] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.728615] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5262d54d-930f-6d1d-ad8d-8641875bd8e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.776476] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.778720] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.778720] env[63538]: INFO nova.compute.manager [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Shelving [ 717.793711] env[63538]: DEBUG nova.compute.manager [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 717.828500] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 4e89aa25-fb4a-430d-ab87-feff57b73780 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 717.834668] env[63538]: DEBUG nova.virt.hardware [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 717.837088] env[63538]: DEBUG nova.virt.hardware [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 717.837454] env[63538]: DEBUG nova.virt.hardware [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 717.837711] env[63538]: DEBUG nova.virt.hardware [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 717.837933] env[63538]: DEBUG nova.virt.hardware [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 717.838374] env[63538]: DEBUG nova.virt.hardware [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 717.838611] env[63538]: DEBUG nova.virt.hardware [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 717.838862] env[63538]: DEBUG nova.virt.hardware [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 717.839558] env[63538]: DEBUG nova.virt.hardware [None 
req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 717.839558] env[63538]: DEBUG nova.virt.hardware [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 717.840935] env[63538]: DEBUG nova.virt.hardware [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 717.845265] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fad15b5-ef0a-4882-b3d0-fe9b6b499cda {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.860274] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33d7e84-1a30-4c0e-ad9b-09e8fd5a8c4e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.891889] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 717.901850] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Creating folder: Project (fe5ff09029904873ba92aedef47660dd). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 717.902798] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9969f6a3-4179-4be7-9c6f-ef92c94ac8b1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.919156] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Created folder: Project (fe5ff09029904873ba92aedef47660dd) in parent group-v992234. [ 717.919364] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Creating folder: Instances. Parent ref: group-v992330. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 717.919609] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-821c61de-eb1f-4a24-a5d5-1456eb5efd7e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.927434] env[63538]: DEBUG nova.network.neutron [-] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.931992] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Created folder: Instances in parent group-v992330. [ 717.931992] env[63538]: DEBUG oslo.service.loopingcall [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 717.931992] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 717.931992] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89ad38c8-66ab-459b-af83-4f50cd916694 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.953406] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 717.953406] env[63538]: value = "task-5100687" [ 717.953406] env[63538]: _type = "Task" [ 717.953406] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.962616] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100687, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.050796] env[63538]: DEBUG nova.network.neutron [req-c5a9dd83-2c72-45cf-a878-43a21a95d172 req-2d674558-8691-4573-b6d1-bc33ca90be86 service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Updated VIF entry in instance network info cache for port 2357c1a1-6201-44ae-9461-80b6269920a2. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 718.051103] env[63538]: DEBUG nova.network.neutron [req-c5a9dd83-2c72-45cf-a878-43a21a95d172 req-2d674558-8691-4573-b6d1-bc33ca90be86 service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Updating instance_info_cache with network_info: [{"id": "2357c1a1-6201-44ae-9461-80b6269920a2", "address": "fa:16:3e:c1:2d:ff", "network": {"id": "ad5bb52a-b208-49a5-986c-0bfcdffe7d94", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-907734579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5784127fe9d4eefaa1f55f0eacdb91d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2357c1a1-62", "ovs_interfaceid": "2357c1a1-6201-44ae-9461-80b6269920a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.229888] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5262d54d-930f-6d1d-ad8d-8641875bd8e3, 'name': SearchDatastore_Task, 'duration_secs': 0.010589} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.230985] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9052d57e-1e2f-4fcd-91cc-fc490d02f282 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.237010] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 718.237010] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5289a48f-d529-e151-335d-5020aafa2c45" [ 718.237010] env[63538]: _type = "Task" [ 718.237010] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.249175] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5289a48f-d529-e151-335d-5020aafa2c45, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.289974] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 718.290260] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a9431cd-2041-4519-a679-2031605a5059 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.298921] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 718.298921] env[63538]: value = "task-5100688" [ 718.298921] env[63538]: _type = "Task" [ 718.298921] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.311889] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100688, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.350735] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 736b110e-7265-42cc-9c9b-35f57c466b0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 718.418598] env[63538]: DEBUG nova.objects.instance [None req-d8df07b7-0679-4c0c-aee6-0128174b4c53 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Lazy-loading 'flavor' on Instance uuid 43729260-d138-4e62-9cc5-4db3ca39f5d2 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 718.432254] env[63538]: INFO nova.compute.manager [-] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Took 1.38 seconds to deallocate network for instance. [ 718.469807] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100687, 'name': CreateVM_Task, 'duration_secs': 0.329482} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.469990] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 718.471790] env[63538]: DEBUG oslo_concurrency.lockutils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.471790] env[63538]: DEBUG oslo_concurrency.lockutils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.472380] env[63538]: DEBUG oslo_concurrency.lockutils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 718.472380] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-985ebd47-99e3-40ba-96a5-aaaa4767e719 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.478360] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Waiting for the task: (returnval){ [ 718.478360] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ae5c58-6988-f481-d482-1261385b9f1a" [ 718.478360] env[63538]: _type = "Task" [ 718.478360] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.489412] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ae5c58-6988-f481-d482-1261385b9f1a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.496892] env[63538]: DEBUG nova.virt.hardware [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 718.497381] env[63538]: DEBUG nova.virt.hardware [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 718.497642] env[63538]: DEBUG nova.virt.hardware [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 718.497919] env[63538]: DEBUG nova.virt.hardware [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 718.499782] env[63538]: DEBUG nova.virt.hardware [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 718.499782] env[63538]: DEBUG nova.virt.hardware [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 718.499782] env[63538]: DEBUG nova.virt.hardware [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 718.499782] env[63538]: DEBUG nova.virt.hardware [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 718.499782] env[63538]: DEBUG nova.virt.hardware [None 
req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 718.500072] env[63538]: DEBUG nova.virt.hardware [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 718.500072] env[63538]: DEBUG nova.virt.hardware [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 718.500441] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2b6426-783f-4292-a237-8646359fe7db {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.510367] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb6ba91-b380-49be-98e1-0ecefb8bd7e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.527877] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 718.535405] env[63538]: DEBUG oslo.service.loopingcall [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 718.535832] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 718.536187] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-446da5e3-dd82-45c1-b888-9add3383ad9c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.554047] env[63538]: DEBUG oslo_concurrency.lockutils [req-c5a9dd83-2c72-45cf-a878-43a21a95d172 req-2d674558-8691-4573-b6d1-bc33ca90be86 service nova] Releasing lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.556565] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 718.556565] env[63538]: value = "task-5100689" [ 718.556565] env[63538]: _type = "Task" [ 718.556565] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.566808] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100689, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.748423] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5289a48f-d529-e151-335d-5020aafa2c45, 'name': SearchDatastore_Task, 'duration_secs': 0.010215} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.748831] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.749245] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 6f29f063-ddb5-491a-a1a0-7c9ed65a1718/6f29f063-ddb5-491a-a1a0-7c9ed65a1718.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 718.749637] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87153804-4186-4fb7-b28a-100dc8a9b447 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.758318] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 718.758318] env[63538]: value = "task-5100690" [ 718.758318] env[63538]: _type = "Task" [ 718.758318] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.768222] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100690, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.811635] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100688, 'name': PowerOffVM_Task, 'duration_secs': 0.254771} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.813753] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 718.813753] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355c1398-8933-4eef-9a3a-639a6b10e09b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.843164] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27561830-d371-4157-8921-077879463057 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.854591] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance e50e95c0-830b-4d71-999b-546b138bf8f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 718.924346] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d8df07b7-0679-4c0c-aee6-0128174b4c53 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquiring lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.924539] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d8df07b7-0679-4c0c-aee6-0128174b4c53 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquired lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.940732] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.994367] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ae5c58-6988-f481-d482-1261385b9f1a, 'name': SearchDatastore_Task, 'duration_secs': 0.025848} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.994367] env[63538]: DEBUG oslo_concurrency.lockutils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.994367] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.994367] env[63538]: DEBUG oslo_concurrency.lockutils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.994628] env[63538]: DEBUG oslo_concurrency.lockutils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.994687] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 718.994937] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9aa2df39-46b4-4e69-9cb9-dd692df9078b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.012098] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 719.012329] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 719.014247] env[63538]: DEBUG nova.compute.manager [req-62a9ee2e-98fa-48f0-ab30-78dee0e4d82a req-a4fabc37-2ded-49d4-acae-0d85030e431c service nova] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Received event network-vif-deleted-719964b3-d739-46b8-ae43-c589419299a1 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 719.015067] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-378dd6c8-6f3c-4b81-983c-abe83224764e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.023111] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Waiting for the task: (returnval){ [ 719.023111] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5225bfbd-6655-ad29-90f2-b2b4c87fa10a" [ 719.023111] env[63538]: _type = "Task" [ 719.023111] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.036399] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5225bfbd-6655-ad29-90f2-b2b4c87fa10a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.069611] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100689, 'name': CreateVM_Task, 'duration_secs': 0.335916} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.069786] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 719.070309] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.070479] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.070819] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 719.071150] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1af5190b-2467-469e-8099-e7ba5566d143 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.079322] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 719.079322] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528fa21a-4425-9882-4b64-9c9c7f78fe23" [ 719.079322] env[63538]: _type = "Task" [ 719.079322] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.090088] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528fa21a-4425-9882-4b64-9c9c7f78fe23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.269496] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100690, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.358632] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 719.358632] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 04dc612b-7987-405b-9716-95c4ff3535ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 719.360209] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e0780c7b-75bd-4c23-8f5d-5379a402c200 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.367577] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 719.367577] env[63538]: value = "task-5100691" [ 719.367577] env[63538]: _type = "Task" [ 719.367577] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.378497] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100691, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.429800] env[63538]: DEBUG nova.network.neutron [None req-d8df07b7-0679-4c0c-aee6-0128174b4c53 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 719.538532] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5225bfbd-6655-ad29-90f2-b2b4c87fa10a, 'name': SearchDatastore_Task, 'duration_secs': 0.054946} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.538800] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ce3cc91-db51-4c9e-899e-c6fae0e40d34 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.547876] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Waiting for the task: (returnval){ [ 719.547876] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a381d0-8077-d18b-5261-6290d0b60f6a" [ 719.547876] env[63538]: _type = "Task" [ 719.547876] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.554938] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a381d0-8077-d18b-5261-6290d0b60f6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.591106] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528fa21a-4425-9882-4b64-9c9c7f78fe23, 'name': SearchDatastore_Task, 'duration_secs': 0.058678} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.591392] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.592049] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 719.592049] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.771040] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100690, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514664} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.771040] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 6f29f063-ddb5-491a-a1a0-7c9ed65a1718/6f29f063-ddb5-491a-a1a0-7c9ed65a1718.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 719.771565] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 719.771565] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c317befc-7ad5-4c07-8278-2542f9fd09f2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.779690] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 719.779690] env[63538]: value = "task-5100692" [ 719.779690] env[63538]: _type = "Task" [ 719.779690] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.790967] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100692, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.863182] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance db5993ce-6982-4b82-8f5d-3fe51df8896b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 719.877820] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100691, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.060086] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a381d0-8077-d18b-5261-6290d0b60f6a, 'name': SearchDatastore_Task, 'duration_secs': 0.042691} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.060086] env[63538]: DEBUG oslo_concurrency.lockutils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.060086] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] bb56950a-3e25-4fb9-9f84-f735e26adc42/bb56950a-3e25-4fb9-9f84-f735e26adc42.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 720.060086] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.060273] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 720.060273] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c54cd221-4563-46ab-8402-440eb8af48f5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.062823] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0c857c9-ed8d-41f7-9bc5-7ed3c444511b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.072025] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Waiting for the task: (returnval){ [ 720.072025] env[63538]: value = "task-5100693" [ 720.072025] env[63538]: _type = "Task" [ 720.072025] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.076019] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 720.076343] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 720.078085] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8627787-6f6e-42bd-a3f9-9b11d16155ad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.085125] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100693, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.092160] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 720.092160] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52de18ff-9af3-94a2-155d-65c8bbf9b583" [ 720.092160] env[63538]: _type = "Task" [ 720.092160] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.101465] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52de18ff-9af3-94a2-155d-65c8bbf9b583, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.295421] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100692, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070122} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.296372] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 720.296586] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e789c0-a1e5-4412-95d4-aefea2565d74 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.320254] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 6f29f063-ddb5-491a-a1a0-7c9ed65a1718/6f29f063-ddb5-491a-a1a0-7c9ed65a1718.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 720.321526] env[63538]: DEBUG nova.network.neutron [None req-d8df07b7-0679-4c0c-aee6-0128174b4c53 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Updating instance_info_cache with network_info: [{"id": "27c8d553-e481-41cf-9f67-20912d2adb46", "address": "fa:16:3e:fb:de:70", "network": {"id": "3f23850b-a1b9-429c-b4c8-e41786ac3f89", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1688699578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94d1cf2838014527bb9c399ae0cff7ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c8d553-e4", "ovs_interfaceid": "27c8d553-e481-41cf-9f67-20912d2adb46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.322765] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d40bc28-1f3a-45fb-b26c-a294d909d9e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.344285] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the 
task: (returnval){ [ 720.344285] env[63538]: value = "task-5100694" [ 720.344285] env[63538]: _type = "Task" [ 720.344285] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.355360] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100694, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.366827] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 720.382282] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100691, 'name': CreateSnapshot_Task, 'duration_secs': 0.898046} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.383389] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 720.384238] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffeab5d-41a1-497b-afa3-8c694651eb3f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.584274] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100693, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.604344] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52de18ff-9af3-94a2-155d-65c8bbf9b583, 'name': SearchDatastore_Task, 'duration_secs': 0.022018} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.605316] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-986c8548-0100-4fab-a70f-ac7fb81a7c0b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.615413] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 720.615413] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a232c4-b58b-3d6f-3670-b80919ba3794" [ 720.615413] env[63538]: _type = "Task" [ 720.615413] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.625177] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a232c4-b58b-3d6f-3670-b80919ba3794, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.838507] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d8df07b7-0679-4c0c-aee6-0128174b4c53 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Releasing lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.838786] env[63538]: DEBUG nova.compute.manager [None req-d8df07b7-0679-4c0c-aee6-0128174b4c53 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Inject network info {{(pid=63538) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7264}} [ 720.839036] env[63538]: DEBUG nova.compute.manager [None req-d8df07b7-0679-4c0c-aee6-0128174b4c53 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] network_info to inject: |[{"id": "27c8d553-e481-41cf-9f67-20912d2adb46", "address": "fa:16:3e:fb:de:70", "network": {"id": "3f23850b-a1b9-429c-b4c8-e41786ac3f89", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1688699578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94d1cf2838014527bb9c399ae0cff7ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c8d553-e4", "ovs_interfaceid": 
"27c8d553-e481-41cf-9f67-20912d2adb46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7265}} [ 720.844305] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d8df07b7-0679-4c0c-aee6-0128174b4c53 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Reconfiguring VM instance to set the machine id {{(pid=63538) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1802}} [ 720.844738] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-671420c2-b961-4203-a8f4-2a59a7bb123d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.861032] env[63538]: DEBUG nova.objects.instance [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Lazy-loading 'flavor' on Instance uuid 43729260-d138-4e62-9cc5-4db3ca39f5d2 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 720.869059] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100694, 'name': ReconfigVM_Task, 'duration_secs': 0.382657} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.870670] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 6f29f063-ddb5-491a-a1a0-7c9ed65a1718/6f29f063-ddb5-491a-a1a0-7c9ed65a1718.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 720.872116] env[63538]: DEBUG oslo_vmware.api [None req-d8df07b7-0679-4c0c-aee6-0128174b4c53 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for the task: (returnval){ [ 720.872116] env[63538]: value = "task-5100695" [ 720.872116] env[63538]: _type = "Task" [ 720.872116] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.872116] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a32e95c-15e3-4c29-bc43-89ee322d8692 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.874572] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 707a79e2-f5db-479c-b719-1e040935cda3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 720.888210] env[63538]: DEBUG oslo_vmware.api [None req-d8df07b7-0679-4c0c-aee6-0128174b4c53 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100695, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.890084] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 720.890084] env[63538]: value = "task-5100696" [ 720.890084] env[63538]: _type = "Task" [ 720.890084] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.907682] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 720.908527] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100696, 'name': Rename_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.908849] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-96a9cc7d-c8bd-402b-a253-f98d7bb52f41 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.918327] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 720.918327] env[63538]: value = "task-5100697" [ 720.918327] env[63538]: _type = "Task" [ 720.918327] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.927615] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100697, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.040752] env[63538]: DEBUG nova.compute.manager [req-1ded9c42-b4ca-4ea0-a69c-1cd8fe60e8b5 req-5547bb2c-9db5-4019-81c8-066824f604fa service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Received event network-changed-27c8d553-e481-41cf-9f67-20912d2adb46 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 721.040924] env[63538]: DEBUG nova.compute.manager [req-1ded9c42-b4ca-4ea0-a69c-1cd8fe60e8b5 req-5547bb2c-9db5-4019-81c8-066824f604fa service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Refreshing instance network info cache due to event network-changed-27c8d553-e481-41cf-9f67-20912d2adb46. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 721.041087] env[63538]: DEBUG oslo_concurrency.lockutils [req-1ded9c42-b4ca-4ea0-a69c-1cd8fe60e8b5 req-5547bb2c-9db5-4019-81c8-066824f604fa service nova] Acquiring lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.041252] env[63538]: DEBUG oslo_concurrency.lockutils [req-1ded9c42-b4ca-4ea0-a69c-1cd8fe60e8b5 req-5547bb2c-9db5-4019-81c8-066824f604fa service nova] Acquired lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.041416] env[63538]: DEBUG nova.network.neutron [req-1ded9c42-b4ca-4ea0-a69c-1cd8fe60e8b5 req-5547bb2c-9db5-4019-81c8-066824f604fa service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Refreshing network info cache for port 27c8d553-e481-41cf-9f67-20912d2adb46 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 721.083262] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100693, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.622258} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.083894] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] bb56950a-3e25-4fb9-9f84-f735e26adc42/bb56950a-3e25-4fb9-9f84-f735e26adc42.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 721.084161] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 721.084454] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97ea172e-e33f-4e83-9d74-57ee9a3cf2b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.092904] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Waiting for the task: (returnval){ [ 721.092904] env[63538]: value = "task-5100698" [ 721.092904] env[63538]: _type = "Task" [ 721.092904] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.103322] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100698, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.129504] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a232c4-b58b-3d6f-3670-b80919ba3794, 'name': SearchDatastore_Task, 'duration_secs': 0.058043} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.129801] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.130067] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1/fd650fdc-6b49-4051-8267-bbd1f0cb86f1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 721.130351] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-961d2d37-17e1-4db6-bcab-705e51a09efd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.137795] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 721.137795] env[63538]: value = "task-5100699" [ 721.137795] env[63538]: _type = "Task" [ 721.137795] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.146688] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100699, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.366189] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquiring lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.381779] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 466be7db-79e4-49fd-aa3b-56fbe5c60457 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 721.385955] env[63538]: DEBUG oslo_vmware.api [None req-d8df07b7-0679-4c0c-aee6-0128174b4c53 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100695, 'name': ReconfigVM_Task, 'duration_secs': 0.382972} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.386535] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d8df07b7-0679-4c0c-aee6-0128174b4c53 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Reconfigured VM instance to set the machine id {{(pid=63538) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1805}} [ 721.402047] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100696, 'name': Rename_Task, 'duration_secs': 0.154138} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.402391] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 721.402766] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9fb4fe1-ba2f-4e44-a976-989e461eeca9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.414498] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 721.414498] env[63538]: value = "task-5100700" [ 721.414498] env[63538]: _type = "Task" [ 721.414498] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.426376] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100700, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.432655] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100697, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.604251] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100698, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082345} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.604695] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 721.605953] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a07ae7f-671e-4595-a9e8-c51b7bf1c6bf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.627568] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] bb56950a-3e25-4fb9-9f84-f735e26adc42/bb56950a-3e25-4fb9-9f84-f735e26adc42.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 721.627852] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b203814c-cdc3-41cf-9276-45a5ccd772ba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.658816] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100699, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.659160] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Waiting for the task: (returnval){ [ 721.659160] env[63538]: value = "task-5100701" [ 721.659160] env[63538]: _type = "Task" [ 721.659160] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.674030] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100701, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.887883] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 46e2c1f4-edf7-45d6-ba77-c872005fcf1b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 721.932194] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100700, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.936317] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100697, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.946039] env[63538]: DEBUG nova.network.neutron [req-1ded9c42-b4ca-4ea0-a69c-1cd8fe60e8b5 req-5547bb2c-9db5-4019-81c8-066824f604fa service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Updated VIF entry in instance network info cache for port 27c8d553-e481-41cf-9f67-20912d2adb46. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 721.946131] env[63538]: DEBUG nova.network.neutron [req-1ded9c42-b4ca-4ea0-a69c-1cd8fe60e8b5 req-5547bb2c-9db5-4019-81c8-066824f604fa service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Updating instance_info_cache with network_info: [{"id": "27c8d553-e481-41cf-9f67-20912d2adb46", "address": "fa:16:3e:fb:de:70", "network": {"id": "3f23850b-a1b9-429c-b4c8-e41786ac3f89", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1688699578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94d1cf2838014527bb9c399ae0cff7ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c8d553-e4", "ovs_interfaceid": "27c8d553-e481-41cf-9f67-20912d2adb46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.156310] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100699, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.957494} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.156570] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1/fd650fdc-6b49-4051-8267-bbd1f0cb86f1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 722.156775] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 722.157038] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1792a92-6b39-4025-a4e1-0829d6ae6108 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.169849] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 722.169849] env[63538]: value = "task-5100702" [ 722.169849] env[63538]: _type = "Task" [ 722.169849] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.178078] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100701, 'name': ReconfigVM_Task, 'duration_secs': 0.463235} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.178827] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Reconfigured VM instance instance-00000028 to attach disk [datastore1] bb56950a-3e25-4fb9-9f84-f735e26adc42/bb56950a-3e25-4fb9-9f84-f735e26adc42.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 722.179544] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1383dab-b435-4b0b-9a5e-2f5f73dee65a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.185382] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100702, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.191238] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Waiting for the task: (returnval){ [ 722.191238] env[63538]: value = "task-5100703" [ 722.191238] env[63538]: _type = "Task" [ 722.191238] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.203132] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100703, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.395609] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance e4b94aa7-7434-4a6e-b6d3-ed02315c435f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 722.435909] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100697, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.436181] env[63538]: DEBUG oslo_vmware.api [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100700, 'name': PowerOnVM_Task, 'duration_secs': 0.534747} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.436417] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 722.436618] env[63538]: INFO nova.compute.manager [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Took 8.76 seconds to spawn the instance on the hypervisor. 
[ 722.436789] env[63538]: DEBUG nova.compute.manager [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 722.437552] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67f52bf-e11b-4374-9f4e-ab9dd4a27eb4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.448542] env[63538]: DEBUG oslo_concurrency.lockutils [req-1ded9c42-b4ca-4ea0-a69c-1cd8fe60e8b5 req-5547bb2c-9db5-4019-81c8-066824f604fa service nova] Releasing lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.449360] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquired lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.680418] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100702, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.147144} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.680748] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 722.681719] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cb6f2e-3ba6-4068-8c5b-944675a1c666 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.703834] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1/fd650fdc-6b49-4051-8267-bbd1f0cb86f1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.707061] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c0235e6-8214-47d3-bd70-bedbe8b2b017 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.729766] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100703, 'name': Rename_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.731320] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 722.731320] env[63538]: value = "task-5100704" [ 722.731320] env[63538]: _type = "Task" [ 722.731320] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.740865] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100704, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.899581] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance a2e036ae-318b-44ea-9db0-10fa3838728b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 722.933156] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100697, 'name': CloneVM_Task, 'duration_secs': 1.852015} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.933573] env[63538]: DEBUG nova.network.neutron [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 722.934850] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Created linked-clone VM from snapshot [ 722.936678] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81645296-fac8-4bd7-8a73-19f38934f489 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.944596] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Uploading image 385b766b-e27c-4c97-87a2-473b5485f688 {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 722.956339] env[63538]: INFO nova.compute.manager [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Took 50.92 seconds to build instance. 
[ 722.980662] env[63538]: DEBUG oslo_vmware.rw_handles [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 722.980662] env[63538]: value = "vm-992335" [ 722.980662] env[63538]: _type = "VirtualMachine" [ 722.980662] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 722.981361] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6b3cb4bc-4647-4ae4-ae94-bb933120da13 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.992163] env[63538]: DEBUG oslo_vmware.rw_handles [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lease: (returnval){ [ 722.992163] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52861d87-b6a8-96d7-2385-41a3c3566ce5" [ 722.992163] env[63538]: _type = "HttpNfcLease" [ 722.992163] env[63538]: } obtained for exporting VM: (result){ [ 722.992163] env[63538]: value = "vm-992335" [ 722.992163] env[63538]: _type = "VirtualMachine" [ 722.992163] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 722.992163] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the lease: (returnval){ [ 722.992163] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52861d87-b6a8-96d7-2385-41a3c3566ce5" [ 722.992163] env[63538]: _type = "HttpNfcLease" [ 722.992163] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 723.003602] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 723.003602] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52861d87-b6a8-96d7-2385-41a3c3566ce5" [ 723.003602] env[63538]: _type = "HttpNfcLease" [ 723.003602] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 723.003883] env[63538]: DEBUG oslo_vmware.rw_handles [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 723.003883] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52861d87-b6a8-96d7-2385-41a3c3566ce5" [ 723.003883] env[63538]: _type = "HttpNfcLease" [ 723.003883] env[63538]: }. 
{{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 723.004651] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1296331f-aa4f-4c6e-9716-450b716d9e94 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.013749] env[63538]: DEBUG oslo_vmware.rw_handles [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5247efbe-146b-062c-04b3-b0d75dd3e8a1/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 723.013832] env[63538]: DEBUG oslo_vmware.rw_handles [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5247efbe-146b-062c-04b3-b0d75dd3e8a1/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 723.115752] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-dfd97101-9440-4143-9cdc-72edd4432f50 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.119483] env[63538]: DEBUG nova.compute.manager [req-7855e4d7-355a-4ace-808e-69f4725f2a73 req-ba2ca053-9e58-4d58-aa36-810f6ea56f36 service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Received event network-changed-27c8d553-e481-41cf-9f67-20912d2adb46 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 723.119689] env[63538]: DEBUG nova.compute.manager [req-7855e4d7-355a-4ace-808e-69f4725f2a73 req-ba2ca053-9e58-4d58-aa36-810f6ea56f36 service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Refreshing instance network info cache due to event network-changed-27c8d553-e481-41cf-9f67-20912d2adb46. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 723.119905] env[63538]: DEBUG oslo_concurrency.lockutils [req-7855e4d7-355a-4ace-808e-69f4725f2a73 req-ba2ca053-9e58-4d58-aa36-810f6ea56f36 service nova] Acquiring lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.204955] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100703, 'name': Rename_Task, 'duration_secs': 0.996937} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.205121] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 723.205518] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65260b68-fb86-4d18-b885-b8d076e410c4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.211986] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Waiting for the task: (returnval){ [ 723.211986] env[63538]: value = "task-5100706" [ 723.211986] env[63538]: _type = "Task" [ 723.211986] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.220470] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100706, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.241242] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100704, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.403140] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance d5d557c6-3d4e-4122-8756-218c9757fa01 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 723.457567] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f7f5eeae-54f1-495d-aa62-610f1c9bb325 tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.262s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.730385] env[63538]: DEBUG oslo_vmware.api [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100706, 'name': PowerOnVM_Task, 'duration_secs': 0.492505} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.730937] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 723.731494] env[63538]: INFO nova.compute.manager [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Took 5.94 seconds to spawn the instance on the hypervisor. [ 723.731787] env[63538]: DEBUG nova.compute.manager [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 723.736484] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8668e2d-350e-4395-8036-a19b3fbebc09 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.758641] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100704, 'name': ReconfigVM_Task, 'duration_secs': 0.661293} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.761663] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Reconfigured VM instance instance-00000025 to attach disk [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1/fd650fdc-6b49-4051-8267-bbd1f0cb86f1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 723.768635] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f92a7f14-2051-4e88-8cd1-090c293c4f72 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.780907] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 723.780907] env[63538]: value = "task-5100707" [ 723.780907] env[63538]: _type = "Task" [ 723.780907] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.793271] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100707, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.795074] env[63538]: DEBUG nova.network.neutron [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Updating instance_info_cache with network_info: [{"id": "27c8d553-e481-41cf-9f67-20912d2adb46", "address": "fa:16:3e:fb:de:70", "network": {"id": "3f23850b-a1b9-429c-b4c8-e41786ac3f89", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1688699578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94d1cf2838014527bb9c399ae0cff7ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c8d553-e4", "ovs_interfaceid": "27c8d553-e481-41cf-9f67-20912d2adb46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.908562] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance f703cd1c-4b77-4a85-a91b-63a2bd0e84a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 723.960842] env[63538]: DEBUG nova.compute.manager [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 724.282067] env[63538]: INFO nova.compute.manager [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Took 49.43 seconds to build instance. [ 724.295795] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100707, 'name': Rename_Task, 'duration_secs': 0.219712} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.296326] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 724.298013] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e9a4adc-ba53-4044-9a54-6e4ba170a66c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.298821] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Releasing lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.299013] env[63538]: DEBUG nova.compute.manager [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Inject network info {{(pid=63538) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7264}} [ 724.299258] env[63538]: DEBUG nova.compute.manager [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] network_info to inject: |[{"id": "27c8d553-e481-41cf-9f67-20912d2adb46", "address": "fa:16:3e:fb:de:70", "network": {"id": "3f23850b-a1b9-429c-b4c8-e41786ac3f89", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1688699578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94d1cf2838014527bb9c399ae0cff7ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c8d553-e4", "ovs_interfaceid": "27c8d553-e481-41cf-9f67-20912d2adb46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7265}} [ 724.305166] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Reconfiguring VM instance to set the machine id {{(pid=63538) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1802}} [ 724.306403] env[63538]: DEBUG 
oslo_concurrency.lockutils [req-7855e4d7-355a-4ace-808e-69f4725f2a73 req-ba2ca053-9e58-4d58-aa36-810f6ea56f36 service nova] Acquired lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.306609] env[63538]: DEBUG nova.network.neutron [req-7855e4d7-355a-4ace-808e-69f4725f2a73 req-ba2ca053-9e58-4d58-aa36-810f6ea56f36 service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Refreshing network info cache for port 27c8d553-e481-41cf-9f67-20912d2adb46 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 724.307819] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cc1f161-6276-4a28-8ed0-94b75baa7671 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.327434] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 724.327434] env[63538]: value = "task-5100708" [ 724.327434] env[63538]: _type = "Task" [ 724.327434] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.332922] env[63538]: DEBUG oslo_vmware.api [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for the task: (returnval){ [ 724.332922] env[63538]: value = "task-5100709" [ 724.332922] env[63538]: _type = "Task" [ 724.332922] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.346420] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100708, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.354677] env[63538]: DEBUG oslo_vmware.api [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100709, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.412285] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 6850191a-4190-4795-ae18-830b41a76085 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 724.488447] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.650673] env[63538]: DEBUG nova.network.neutron [req-7855e4d7-355a-4ace-808e-69f4725f2a73 req-ba2ca053-9e58-4d58-aa36-810f6ea56f36 service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Updated VIF entry in instance network info cache for port 27c8d553-e481-41cf-9f67-20912d2adb46. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 724.651580] env[63538]: DEBUG nova.network.neutron [req-7855e4d7-355a-4ace-808e-69f4725f2a73 req-ba2ca053-9e58-4d58-aa36-810f6ea56f36 service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Updating instance_info_cache with network_info: [{"id": "27c8d553-e481-41cf-9f67-20912d2adb46", "address": "fa:16:3e:fb:de:70", "network": {"id": "3f23850b-a1b9-429c-b4c8-e41786ac3f89", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1688699578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94d1cf2838014527bb9c399ae0cff7ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27c8d553-e4", "ovs_interfaceid": "27c8d553-e481-41cf-9f67-20912d2adb46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.790081] env[63538]: DEBUG oslo_concurrency.lockutils [None req-de535256-041b-4d8b-a2d4-729bd41a8c76 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Lock "bb56950a-3e25-4fb9-9f84-f735e26adc42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.846s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.842685] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100708, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.849119] env[63538]: DEBUG oslo_vmware.api [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100709, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.915811] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 5bf7ed57-62d5-4abc-96d8-78b979baed92 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 724.916718] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 724.916874] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=100GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '16', 'num_vm_active': '14', 'num_task_None': '13', 'num_os_type_None': '16', 'num_proj_7c1f0c999ede418c866074d9276050ff': '3', 'io_workload': '3', 'num_proj_3dc18da1ea704eeaaeb62633c4f76ee8': '1', 'num_proj_6c5e6ed681ed4078bd9115b30f419d9a': '1', 'num_proj_ea05f3fb4676466bb2a286f5a2fefb8f': '1', 'num_proj_f5784127fe9d4eefaa1f55f0eacdb91d': '2', 'num_proj_cdfc96ac41be43f9ba0596444eb75737': '2', 'num_proj_452b39ccca6b4fcba39b1e61f0508f14': '2', 'num_proj_94d1cf2838014527bb9c399ae0cff7ce': '1', 'num_task_deleting': '1', 'num_proj_492427e54e1048f292dab2abdac71af5': '1', 'num_task_rebuilding': '1', 'num_proj_128b2f8fb95d40bda0cf9ea35684928a': '1', 'num_vm_building': '2', 'num_task_spawning': '1', 'num_proj_fe5ff09029904873ba92aedef47660dd': '1'} {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 725.156050] env[63538]: DEBUG oslo_concurrency.lockutils [req-7855e4d7-355a-4ace-808e-69f4725f2a73 req-ba2ca053-9e58-4d58-aa36-810f6ea56f36 service nova] Releasing lock "refresh_cache-43729260-d138-4e62-9cc5-4db3ca39f5d2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.296830] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 725.347313] env[63538]: DEBUG oslo_vmware.api [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100708, 'name': PowerOnVM_Task, 'duration_secs': 0.581478} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.347976] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 725.348210] env[63538]: DEBUG nova.compute.manager [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 725.349086] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efef6949-e1a2-4d60-9780-e6dfe658ebfe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.360640] env[63538]: DEBUG oslo_vmware.api [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100709, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.656586] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28569e0-6399-420c-a86d-bf84c2a9d2f6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.665541] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817b5d11-2edf-4687-babc-980b5460d264 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.705402] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380060c2-1d69-4b91-a6c2-0498ac2f82a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.714905] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f55a660-83b7-4746-8472-f5b5fc0a7d77 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.734657] env[63538]: DEBUG nova.compute.provider_tree [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 725.828300] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.851123] env[63538]: DEBUG oslo_vmware.api [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100709, 'name': ReconfigVM_Task, 'duration_secs': 1.184301} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.851604] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b82b485a-de0e-418d-988f-a4da0bbe5f09 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Reconfigured VM instance to set the machine id {{(pid=63538) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1805}} [ 725.878831] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.901156] env[63538]: DEBUG nova.compute.manager [req-8ea09c1d-719e-4261-ab86-d769e6127de5 req-1c124638-cf18-45cc-8073-0fbe6db19cfa service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Received event network-changed-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 725.901399] env[63538]: DEBUG nova.compute.manager [req-8ea09c1d-719e-4261-ab86-d769e6127de5 req-1c124638-cf18-45cc-8073-0fbe6db19cfa service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Refreshing instance network info cache due to event network-changed-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 725.901676] env[63538]: DEBUG oslo_concurrency.lockutils [req-8ea09c1d-719e-4261-ab86-d769e6127de5 req-1c124638-cf18-45cc-8073-0fbe6db19cfa service nova] Acquiring lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.901829] env[63538]: DEBUG oslo_concurrency.lockutils [req-8ea09c1d-719e-4261-ab86-d769e6127de5 req-1c124638-cf18-45cc-8073-0fbe6db19cfa service nova] Acquired lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.902019] env[63538]: DEBUG nova.network.neutron [req-8ea09c1d-719e-4261-ab86-d769e6127de5 req-1c124638-cf18-45cc-8073-0fbe6db19cfa service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Refreshing network info cache for port 38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 726.059046] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquiring lock "43729260-d138-4e62-9cc5-4db3ca39f5d2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.059409] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Lock "43729260-d138-4e62-9cc5-4db3ca39f5d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.060431] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquiring lock "43729260-d138-4e62-9cc5-4db3ca39f5d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.060431] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Lock "43729260-d138-4e62-9cc5-4db3ca39f5d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.060431] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Lock "43729260-d138-4e62-9cc5-4db3ca39f5d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.063465] env[63538]: INFO nova.compute.manager [None 
req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Terminating instance [ 726.065493] env[63538]: DEBUG nova.compute.manager [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 726.065754] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 726.066760] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe23eae-5125-436f-a358-9a8959a37e4e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.075454] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 726.075782] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e6eac61-2326-4d65-8a10-ecae2ec754d6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.083760] env[63538]: DEBUG oslo_vmware.api [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for the task: (returnval){ [ 726.083760] env[63538]: value = "task-5100710" [ 726.083760] env[63538]: _type = "Task" [ 726.083760] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.097224] env[63538]: DEBUG oslo_vmware.api [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100710, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.145223] env[63538]: DEBUG nova.compute.manager [None req-5afb4ff5-6d66-42e4-bfbd-ee06746a216b tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 726.146330] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9571fa-9b82-4127-9396-03bde88a6368 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.283729] env[63538]: DEBUG nova.scheduler.client.report [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 67 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 726.283729] env[63538]: DEBUG nova.compute.provider_tree [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 67 to 68 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 726.283729] env[63538]: DEBUG nova.compute.provider_tree [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 726.597173] env[63538]: DEBUG oslo_vmware.api [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100710, 'name': PowerOffVM_Task, 'duration_secs': 0.235329} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.597318] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 726.597653] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 726.598364] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfbd5ca9-53df-4744-8def-6bd75641780b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.667872] env[63538]: INFO nova.compute.manager [None req-5afb4ff5-6d66-42e4-bfbd-ee06746a216b tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] instance snapshotting [ 726.667872] env[63538]: DEBUG nova.objects.instance [None req-5afb4ff5-6d66-42e4-bfbd-ee06746a216b tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Lazy-loading 'flavor' on Instance uuid bb56950a-3e25-4fb9-9f84-f735e26adc42 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 726.672026] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 726.672026] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 726.672026] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Deleting the datastore file [datastore1] 43729260-d138-4e62-9cc5-4db3ca39f5d2 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 726.672026] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76c78cee-a742-411f-b18e-bc0362a7f433 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.681903] env[63538]: DEBUG oslo_vmware.api [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for the task: (returnval){ [ 726.681903] env[63538]: value = "task-5100712" [ 726.681903] env[63538]: _type = "Task" [ 726.681903] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.695179] env[63538]: DEBUG oslo_vmware.api [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100712, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.795032] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63538) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 726.795032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 11.033s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.795032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.367s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.795032] env[63538]: INFO nova.compute.claims [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 726.917854] env[63538]: DEBUG nova.network.neutron [req-8ea09c1d-719e-4261-ab86-d769e6127de5 req-1c124638-cf18-45cc-8073-0fbe6db19cfa service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Updated VIF entry in instance network info cache for port 38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 726.917854] env[63538]: DEBUG nova.network.neutron [req-8ea09c1d-719e-4261-ab86-d769e6127de5 req-1c124638-cf18-45cc-8073-0fbe6db19cfa service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Updating instance_info_cache with network_info: [{"id": "38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "address": "fa:16:3e:5f:b1:e3", "network": {"id": "ad5bb52a-b208-49a5-986c-0bfcdffe7d94", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-907734579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5784127fe9d4eefaa1f55f0eacdb91d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38f5393e-f3", "ovs_interfaceid": "38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.017152] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Acquiring lock "bb56950a-3e25-4fb9-9f84-f735e26adc42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.017395] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Lock "bb56950a-3e25-4fb9-9f84-f735e26adc42" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.017613] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Acquiring lock "bb56950a-3e25-4fb9-9f84-f735e26adc42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.017802] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Lock "bb56950a-3e25-4fb9-9f84-f735e26adc42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.017972] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a87b4777-1fdc-40af-9385-761fc9b91664 
tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Lock "bb56950a-3e25-4fb9-9f84-f735e26adc42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.023024] env[63538]: INFO nova.compute.manager [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Terminating instance [ 727.024403] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Acquiring lock "refresh_cache-bb56950a-3e25-4fb9-9f84-f735e26adc42" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.024651] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Acquired lock "refresh_cache-bb56950a-3e25-4fb9-9f84-f735e26adc42" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.024855] env[63538]: DEBUG nova.network.neutron [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 727.174873] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2e44fe-8005-44db-adc6-7aa968831d25 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.197983] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9329b5-6488-42bf-bc2a-bfb6387748b6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.210210] env[63538]: DEBUG oslo_vmware.api [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Task: {'id': task-5100712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241416} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.210720] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 727.210912] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 727.211104] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 727.211363] env[63538]: INFO nova.compute.manager [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Took 1.15 seconds to destroy the instance on the hypervisor. [ 727.211731] env[63538]: DEBUG oslo.service.loopingcall [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 727.212043] env[63538]: DEBUG nova.compute.manager [-] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 727.212202] env[63538]: DEBUG nova.network.neutron [-] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 727.423100] env[63538]: DEBUG oslo_concurrency.lockutils [req-8ea09c1d-719e-4261-ab86-d769e6127de5 req-1c124638-cf18-45cc-8073-0fbe6db19cfa service nova] Releasing lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.549754] env[63538]: DEBUG nova.network.neutron [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 727.640388] env[63538]: DEBUG nova.network.neutron [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.714231] env[63538]: DEBUG nova.compute.manager [None req-5afb4ff5-6d66-42e4-bfbd-ee06746a216b tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Instance disappeared during snapshot {{(pid=63538) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 727.818558] env[63538]: INFO nova.compute.manager [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Rebuilding instance [ 727.899145] env[63538]: DEBUG nova.compute.manager [None req-5afb4ff5-6d66-42e4-bfbd-ee06746a216b tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Found 0 images (rotation: 2) {{(pid=63538) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 727.926855] env[63538]: DEBUG nova.compute.manager [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 727.927922] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee72acc-3d2d-4c74-9dbc-a69d1936652c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.156221] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Releasing lock "refresh_cache-bb56950a-3e25-4fb9-9f84-f735e26adc42" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.156721] env[63538]: DEBUG nova.compute.manager [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 728.156928] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 728.158190] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d50e83-a0d4-4153-b126-805440bcb074 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.169685] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 728.169685] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9052912-e764-48e4-9164-68a794367924 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.182675] env[63538]: DEBUG oslo_vmware.api [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Waiting for the task: (returnval){ [ 728.182675] env[63538]: value = "task-5100713" [ 728.182675] env[63538]: _type = "Task" [ 728.182675] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.193685] env[63538]: DEBUG oslo_vmware.api [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100713, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.296934] env[63538]: DEBUG nova.network.neutron [-] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.404346] env[63538]: DEBUG nova.compute.manager [req-6d6f4cde-9846-463c-b1c2-d49997005b8c req-7e17401a-25da-41af-9d23-521936c30be2 service nova] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Received event network-vif-deleted-27c8d553-e481-41cf-9f67-20912d2adb46 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 728.445438] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 728.448418] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3c294e8-f879-48f9-a30e-d15a5bb04208 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.460283] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Waiting for the task: (returnval){ [ 728.460283] env[63538]: value = "task-5100714" [ 728.460283] env[63538]: _type = "Task" [ 728.460283] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.470647] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100714, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.493585] env[63538]: DEBUG nova.compute.manager [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Received event network-changed-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 728.493789] env[63538]: DEBUG nova.compute.manager [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Refreshing instance network info cache due to event network-changed-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 728.494258] env[63538]: DEBUG oslo_concurrency.lockutils [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] Acquiring lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.494258] env[63538]: DEBUG oslo_concurrency.lockutils [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] Acquired lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.494258] env[63538]: DEBUG nova.network.neutron [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Refreshing network info cache for port 38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 728.507690] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b737b6e4-5afa-40b8-9318-e088555ccac8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.520201] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf69ecfd-5656-499b-b0f8-56d2baadea59 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.556025] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5794cbf5-e247-4001-915a-085bbb0fb04b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.562481] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a39dcc-98af-49bc-a858-d3a54c9b2529 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.578533] env[63538]: DEBUG nova.compute.provider_tree [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 728.693402] env[63538]: DEBUG oslo_vmware.api [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100713, 'name': PowerOffVM_Task, 'duration_secs': 0.159013} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.693735] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 728.693965] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 728.694342] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-591b0b9e-7af5-4755-8442-6c3dfda8d352 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.722775] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 728.722775] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 728.722775] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Deleting the datastore file [datastore1] bb56950a-3e25-4fb9-9f84-f735e26adc42 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 728.722775] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17738a2a-497f-4bc9-a585-f14b64486d5a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.730937] env[63538]: DEBUG oslo_vmware.api [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Waiting for the task: (returnval){ [ 728.730937] env[63538]: value = "task-5100716" [ 728.730937] env[63538]: _type = "Task" [ 728.730937] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.741531] env[63538]: DEBUG oslo_vmware.api [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100716, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.799901] env[63538]: INFO nova.compute.manager [-] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Took 1.59 seconds to deallocate network for instance. 
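The records above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, followed by the "Waiting for the task" / "progress is 0%" / "completed successfully" poll lines) all come from the same oslo.vmware pattern: invoke a vSphere *_Task method through the API session, then block on wait_for_task until it succeeds. The following is a minimal sketch of that pattern, not Nova's actual vmops code path; the vCenter endpoint, credentials, vm_ref, datastore path and datacenter ref are placeholders, not values from this log.

```python
# Sketch of the "invoke task, then poll it" pattern seen in the log above.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org',              # hypothetical vCenter endpoint
    'administrator@vsphere.local',      # hypothetical credentials
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)

def power_off_and_delete(session, vm_ref, ds_path, dc_ref):
    # Start PowerOffVM_Task on the VM managed object and block until the
    # task reaches 'success'; wait_for_task polls it, which is what produces
    # the "progress is 0%" and "completed successfully" lines in the log.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # Same pattern for the datastore file removal done by ds_util.file_delete:
    # the FileManager managed object comes from the retrieved service content.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)
```

Each call to invoke_api is what shows up as an "Invoking <ManagedObject>.<Method> with opID=oslo.vmware-..." DEBUG line, and each wait_for_task accounts for one Task: {'id': 'task-...'} polling sequence.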
[ 728.971615] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100714, 'name': PowerOffVM_Task, 'duration_secs': 0.231676} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.972138] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 728.972488] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 728.973397] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc53ea2c-550e-43a7-b049-66e3b2fbea2c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.987865] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 728.988422] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6defa39-ec2e-4555-9c03-377c70c644a6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.017709] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 729.019100] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 729.019658] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Deleting the datastore file [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 729.020025] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19f4ed2c-b7f5-423a-8345-0ddcc42099ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.032028] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Waiting for the task: 
(returnval){ [ 729.032028] env[63538]: value = "task-5100718" [ 729.032028] env[63538]: _type = "Task" [ 729.032028] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.041757] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100718, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.119532] env[63538]: DEBUG nova.scheduler.client.report [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 68 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 729.119804] env[63538]: DEBUG nova.compute.provider_tree [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 68 to 69 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 729.119990] env[63538]: DEBUG nova.compute.provider_tree [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 729.245081] env[63538]: DEBUG oslo_vmware.api [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Task: {'id': task-5100716, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.245457] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.245635] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 729.245811] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 729.245986] env[63538]: INFO nova.compute.manager [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Took 1.09 seconds to destroy the instance on the hypervisor. [ 729.248069] env[63538]: DEBUG oslo.service.loopingcall [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 729.248069] env[63538]: DEBUG nova.compute.manager [-] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 729.248069] env[63538]: DEBUG nova.network.neutron [-] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 729.301596] env[63538]: DEBUG nova.network.neutron [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Updated VIF entry in instance network info cache for port 38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 729.302019] env[63538]: DEBUG nova.network.neutron [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Updating instance_info_cache with network_info: [{"id": "38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "address": "fa:16:3e:5f:b1:e3", "network": {"id": "ad5bb52a-b208-49a5-986c-0bfcdffe7d94", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-907734579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5784127fe9d4eefaa1f55f0eacdb91d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38f5393e-f3", "ovs_interfaceid": "38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.307817] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.389346] env[63538]: DEBUG nova.network.neutron [-] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 729.540821] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100718, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14166} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.541110] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.542149] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 729.542149] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 729.631978] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.839s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.632628] env[63538]: DEBUG nova.compute.manager [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 729.635713] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.639s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.637210] env[63538]: INFO nova.compute.claims [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 729.806719] env[63538]: DEBUG oslo_concurrency.lockutils [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] Releasing lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.807035] env[63538]: DEBUG nova.compute.manager [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Received event network-changed-2357c1a1-6201-44ae-9461-80b6269920a2 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 729.807187] env[63538]: DEBUG nova.compute.manager [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Refreshing instance network info cache due to event network-changed-2357c1a1-6201-44ae-9461-80b6269920a2. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 729.807925] env[63538]: DEBUG oslo_concurrency.lockutils [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] Acquiring lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.807925] env[63538]: DEBUG oslo_concurrency.lockutils [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] Acquired lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.807925] env[63538]: DEBUG nova.network.neutron [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Refreshing network info cache for port 2357c1a1-6201-44ae-9461-80b6269920a2 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 729.897305] env[63538]: DEBUG nova.network.neutron [-] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.148883] env[63538]: DEBUG nova.compute.utils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 730.150233] env[63538]: DEBUG nova.compute.manager [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 730.150423] env[63538]: DEBUG nova.network.neutron [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 730.209238] env[63538]: DEBUG nova.policy [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e153308b0ce242949aebfa9135eb1966', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8342a170e0f54deea52260c8e741e891', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 730.405474] env[63538]: INFO nova.compute.manager [-] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Took 1.16 seconds to deallocate network for instance. 
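The set_inventory_for_provider records in this span show the exact inventory payload Nova's report client pushes to Placement for provider f65218a4-1d3d-476a-9093-01cae92c8635 (generation 68, DISK_GB max_unit 95). Below is a sketch of how that same update looks when expressed directly against the Placement HTTP API; the endpoint, token and use of the requests library are illustrative placeholders, since Nova itself goes through nova.scheduler.client.report rather than raw HTTP calls.

```python
# Sketch of the inventory update shape visible in the log above.
import requests

PLACEMENT = 'http://placement.example.org/placement'   # hypothetical endpoint
RP_UUID = 'f65218a4-1d3d-476a-9093-01cae92c8635'        # provider from the log

inventories = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                  'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'min_unit': 1,
                  'max_unit': 95,    'step_size': 1, 'allocation_ratio': 1.0},
}

resp = requests.put(
    f'{PLACEMENT}/resource_providers/{RP_UUID}/inventories',
    json={
        # The generation must match what Placement last reported (68 here);
        # a stale generation returns 409 and the caller refreshes and retries,
        # which is why the provider_tree log bumps the generation to 69 after
        # the update succeeds.
        'resource_provider_generation': 68,
        'inventories': inventories,
    },
    headers={
        'X-Auth-Token': '<token>',                      # placeholder
        'OpenStack-API-Version': 'placement 1.26',
    })
resp.raise_for_status()
```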
[ 730.588904] env[63538]: DEBUG nova.virt.hardware [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 730.589177] env[63538]: DEBUG nova.virt.hardware [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 730.589391] env[63538]: DEBUG nova.virt.hardware [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 730.589600] env[63538]: DEBUG nova.virt.hardware [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 730.589687] env[63538]: DEBUG nova.virt.hardware [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 730.589841] env[63538]: DEBUG nova.virt.hardware [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 730.590080] env[63538]: DEBUG nova.virt.hardware [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 730.590248] env[63538]: DEBUG nova.virt.hardware [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 730.590475] env[63538]: DEBUG nova.virt.hardware [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Got 1 
possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 730.590655] env[63538]: DEBUG nova.virt.hardware [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 730.590759] env[63538]: DEBUG nova.virt.hardware [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 730.591848] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15923e24-3a12-4345-b263-13b827012b06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.602809] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71cbbcd2-2f30-457a-bafc-96e541e78e6a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.618189] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.625163] env[63538]: DEBUG oslo.service.loopingcall [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.625803] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 730.625917] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93016683-0945-47a9-8dee-b64dd66117cb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.649925] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.649925] env[63538]: value = "task-5100719" [ 730.649925] env[63538]: _type = "Task" [ 730.649925] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.659807] env[63538]: DEBUG nova.compute.manager [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 730.671235] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100719, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.681276] env[63538]: DEBUG nova.network.neutron [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Successfully created port: 8dcd3b3f-b4cf-4491-b430-9ef54588c908 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.698888] env[63538]: DEBUG oslo_concurrency.lockutils [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.699135] env[63538]: DEBUG oslo_concurrency.lockutils [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.702863] env[63538]: DEBUG oslo_concurrency.lockutils [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.702863] env[63538]: DEBUG oslo_concurrency.lockutils [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.702863] env[63538]: DEBUG oslo_concurrency.lockutils [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.702863] env[63538]: INFO nova.compute.manager [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Terminating instance [ 730.705394] env[63538]: DEBUG nova.compute.manager [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 730.705658] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 730.708320] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f358bc-8c83-4cdb-bafd-3ed2bba0d61c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.719597] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 730.719942] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26b01dec-d21e-41ad-b1dc-9f35f75c503e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.728020] env[63538]: DEBUG oslo_vmware.api [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 730.728020] env[63538]: value = "task-5100720" [ 730.728020] env[63538]: _type = "Task" [ 730.728020] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.737722] env[63538]: DEBUG oslo_vmware.api [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100720, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.775885] env[63538]: DEBUG nova.compute.manager [req-8a4af633-9e65-4e3f-83c1-0da308c8e3d4 req-52243b46-f4fa-413f-95b1-4a1f5d583f15 service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Received event network-changed-2357c1a1-6201-44ae-9461-80b6269920a2 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 730.776155] env[63538]: DEBUG nova.compute.manager [req-8a4af633-9e65-4e3f-83c1-0da308c8e3d4 req-52243b46-f4fa-413f-95b1-4a1f5d583f15 service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Refreshing instance network info cache due to event network-changed-2357c1a1-6201-44ae-9461-80b6269920a2. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 730.776469] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a4af633-9e65-4e3f-83c1-0da308c8e3d4 req-52243b46-f4fa-413f-95b1-4a1f5d583f15 service nova] Acquiring lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.919933] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.002845] env[63538]: DEBUG nova.network.neutron [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Updated VIF entry in instance network info cache for port 2357c1a1-6201-44ae-9461-80b6269920a2. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 731.003270] env[63538]: DEBUG nova.network.neutron [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Updating instance_info_cache with network_info: [{"id": "2357c1a1-6201-44ae-9461-80b6269920a2", "address": "fa:16:3e:c1:2d:ff", "network": {"id": "ad5bb52a-b208-49a5-986c-0bfcdffe7d94", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-907734579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5784127fe9d4eefaa1f55f0eacdb91d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2357c1a1-62", "ovs_interfaceid": "2357c1a1-6201-44ae-9461-80b6269920a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.164960] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100719, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.246325] env[63538]: DEBUG oslo_vmware.api [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100720, 'name': PowerOffVM_Task, 'duration_secs': 0.274349} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.250687] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 731.251038] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 731.252201] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-185aac12-6e88-4ee7-97fb-4e90b5c5c313 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.326959] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9fdbeb-cd38-404d-88cf-bbbde8dcf0e3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.331773] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 731.332143] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 731.332471] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Deleting the datastore file [datastore1] 6f29f063-ddb5-491a-a1a0-7c9ed65a1718 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.333207] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f0a695e-f259-47d2-a6c0-2570300bda9a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.339490] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b698e187-bcb0-47a8-977e-08a3284cce92 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.344649] env[63538]: DEBUG oslo_vmware.api [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 731.344649] env[63538]: value = "task-5100722" [ 731.344649] env[63538]: _type = "Task" [ 731.344649] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.379177] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66b3001-52fc-4f05-a289-7f935210dadc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.385848] env[63538]: DEBUG oslo_vmware.api [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100722, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.391958] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae742af-a93a-4bbd-9dce-020c77dc0f1d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.412858] env[63538]: DEBUG nova.compute.provider_tree [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.474533] env[63538]: DEBUG oslo_vmware.rw_handles [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5247efbe-146b-062c-04b3-b0d75dd3e8a1/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 731.475678] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66426d9b-ee8e-45d2-95f9-d5744f142f0b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.483442] env[63538]: DEBUG oslo_vmware.rw_handles [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5247efbe-146b-062c-04b3-b0d75dd3e8a1/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 731.483662] env[63538]: ERROR oslo_vmware.rw_handles [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5247efbe-146b-062c-04b3-b0d75dd3e8a1/disk-0.vmdk due to incomplete transfer. [ 731.483932] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-62587d13-9985-43c6-b84a-c71b612572aa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.492395] env[63538]: DEBUG oslo_vmware.rw_handles [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5247efbe-146b-062c-04b3-b0d75dd3e8a1/disk-0.vmdk. 
{{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 731.492626] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Uploaded image 385b766b-e27c-4c97-87a2-473b5485f688 to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 731.495500] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 731.496325] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e05fd439-7450-4dbe-9bfb-eef9b9077d6c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.504139] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 731.504139] env[63538]: value = "task-5100723" [ 731.504139] env[63538]: _type = "Task" [ 731.504139] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.508972] env[63538]: DEBUG oslo_concurrency.lockutils [req-fe7b19e3-2532-41e2-b40e-1f5c598351ca req-50e01532-24b9-4665-a35a-7c9a35195c7f service nova] Releasing lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.509439] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a4af633-9e65-4e3f-83c1-0da308c8e3d4 req-52243b46-f4fa-413f-95b1-4a1f5d583f15 service nova] Acquired lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.509647] env[63538]: DEBUG nova.network.neutron [req-8a4af633-9e65-4e3f-83c1-0da308c8e3d4 req-52243b46-f4fa-413f-95b1-4a1f5d583f15 service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Refreshing network info cache for port 2357c1a1-6201-44ae-9461-80b6269920a2 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 731.516841] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100723, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.663699] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100719, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.673188] env[63538]: DEBUG nova.compute.manager [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 731.704469] env[63538]: DEBUG nova.virt.hardware [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 731.704768] env[63538]: DEBUG nova.virt.hardware [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 731.704943] env[63538]: DEBUG nova.virt.hardware [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.705704] env[63538]: DEBUG nova.virt.hardware [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 731.705915] env[63538]: DEBUG nova.virt.hardware [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.706096] env[63538]: DEBUG nova.virt.hardware [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 731.706354] env[63538]: DEBUG nova.virt.hardware [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 731.706529] env[63538]: DEBUG nova.virt.hardware [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 731.706732] env[63538]: DEBUG 
nova.virt.hardware [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 731.706907] env[63538]: DEBUG nova.virt.hardware [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 731.707100] env[63538]: DEBUG nova.virt.hardware [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 731.708045] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add718eb-63f7-4e97-82cb-0dc2d915ab10 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.717756] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b7b210-b97d-4b76-a0b2-4c7d9096479a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.857587] env[63538]: DEBUG oslo_vmware.api [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100722, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162865} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.858303] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 731.858847] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 731.859291] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 731.859739] env[63538]: INFO nova.compute.manager [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Took 1.15 seconds to destroy the instance on the hypervisor. 
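The nova.virt.hardware trace above (flavor and image limits 0:0:0, preferred topology 0:0:0, maximum 65536:65536:65536, exactly one possible topology for a 1-vCPU m1.nano flavor) amounts to enumerating every sockets*cores*threads factorization of the vCPU count that fits the limits and then sorting by preference. The short Python sketch below illustrates that enumeration step only; it is not the Nova source, and the VirtCPUTopology namedtuple is a stand-in for Nova's own object.

    # Illustrative sketch (not the Nova implementation): list every
    # sockets*cores*threads factorization of a vCPU count within given limits.
    # For vcpus=1 this yields the single candidate seen in the log,
    # VirtCPUTopology(cores=1, sockets=1, threads=1).
    import collections

    VirtCPUTopology = collections.namedtuple('VirtCPUTopology', 'cores sockets threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topos = []
        # Each factor can be at most vcpus, so cap the search ranges accordingly.
        for s in range(1, min(max_sockets, vcpus) + 1):
            for c in range(1, min(max_cores, vcpus) + 1):
                for t in range(1, min(max_threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        topos.append(VirtCPUTopology(cores=c, sockets=s, threads=t))
        return topos

    print(possible_topologies(1))  # [VirtCPUTopology(cores=1, sockets=1, threads=1)]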
[ 731.860286] env[63538]: DEBUG oslo.service.loopingcall [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 731.860738] env[63538]: DEBUG nova.compute.manager [-] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 731.861037] env[63538]: DEBUG nova.network.neutron [-] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 731.919026] env[63538]: DEBUG nova.scheduler.client.report [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 732.017845] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100723, 'name': Destroy_Task, 'duration_secs': 0.374654} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.018166] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Destroyed the VM [ 732.018500] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 732.020398] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-748decab-9c37-450a-b8ce-70891f7db10e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.027970] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 732.027970] env[63538]: value = "task-5100724" [ 732.027970] env[63538]: _type = "Task" [ 732.027970] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.041226] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100724, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.164658] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100719, 'name': CreateVM_Task, 'duration_secs': 1.332212} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.167645] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 732.167768] env[63538]: DEBUG oslo_concurrency.lockutils [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.167959] env[63538]: DEBUG oslo_concurrency.lockutils [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.168679] env[63538]: DEBUG oslo_concurrency.lockutils [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 732.168896] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a163f2b-b0f1-4396-9794-3b9c83e2c3da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.178236] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Waiting for the task: (returnval){ [ 732.178236] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52155037-3155-e093-2a73-655b1478dd16" [ 732.178236] env[63538]: _type = "Task" [ 732.178236] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.190789] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52155037-3155-e093-2a73-655b1478dd16, 'name': SearchDatastore_Task, 'duration_secs': 0.010728} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.191142] env[63538]: DEBUG oslo_concurrency.lockutils [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.191427] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 732.191688] env[63538]: DEBUG oslo_concurrency.lockutils [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.191848] env[63538]: DEBUG oslo_concurrency.lockutils [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.192046] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 732.192337] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b16b3b8-37e2-4153-b29f-ed0e768c97e9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.209225] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 732.209225] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 732.210227] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0599496c-6b6d-49ac-9560-47575cdb52d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.220242] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Waiting for the task: (returnval){ [ 732.220242] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524752d9-4aa1-fe24-4498-0bbad95dac4c" [ 732.220242] env[63538]: _type = "Task" [ 732.220242] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.230896] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524752d9-4aa1-fe24-4498-0bbad95dac4c, 'name': SearchDatastore_Task, 'duration_secs': 0.011127} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.232257] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbced1fc-423d-4498-b9e6-4da510256d0f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.238315] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Waiting for the task: (returnval){ [ 732.238315] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5240a682-fc9f-29fd-2636-e5c07c6be6eb" [ 732.238315] env[63538]: _type = "Task" [ 732.238315] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.252869] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5240a682-fc9f-29fd-2636-e5c07c6be6eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.425741] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.790s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.426352] env[63538]: DEBUG nova.compute.manager [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 732.432352] env[63538]: DEBUG oslo_concurrency.lockutils [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.645s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.436027] env[63538]: DEBUG oslo_concurrency.lockutils [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.436027] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 48.671s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.436413] env[63538]: INFO nova.compute.claims [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.467256] env[63538]: INFO nova.scheduler.client.report [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Deleted allocations for instance ee9fe572-7a17-46db-8330-4b6f632c6b2c [ 732.494829] env[63538]: DEBUG nova.network.neutron [req-8a4af633-9e65-4e3f-83c1-0da308c8e3d4 req-52243b46-f4fa-413f-95b1-4a1f5d583f15 service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Updated VIF entry in instance network info cache for port 2357c1a1-6201-44ae-9461-80b6269920a2. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 732.494829] env[63538]: DEBUG nova.network.neutron [req-8a4af633-9e65-4e3f-83c1-0da308c8e3d4 req-52243b46-f4fa-413f-95b1-4a1f5d583f15 service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Updating instance_info_cache with network_info: [{"id": "2357c1a1-6201-44ae-9461-80b6269920a2", "address": "fa:16:3e:c1:2d:ff", "network": {"id": "ad5bb52a-b208-49a5-986c-0bfcdffe7d94", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-907734579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5784127fe9d4eefaa1f55f0eacdb91d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2357c1a1-62", "ovs_interfaceid": "2357c1a1-6201-44ae-9461-80b6269920a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.541306] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100724, 'name': RemoveSnapshot_Task, 'duration_secs': 0.404744} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.541613] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 732.541880] env[63538]: DEBUG nova.compute.manager [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 732.542862] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61943c9c-c657-4054-8277-1c5a4a6af2cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.622708] env[63538]: DEBUG nova.network.neutron [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Successfully updated port: 8dcd3b3f-b4cf-4491-b430-9ef54588c908 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 732.630897] env[63538]: DEBUG nova.compute.manager [req-bc08d60e-8cd1-4188-a557-cfaa304bee50 req-4642c4d3-0bee-414e-ab29-ce3a1ca09e67 service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Received event network-changed-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 732.631483] env[63538]: DEBUG nova.compute.manager [req-bc08d60e-8cd1-4188-a557-cfaa304bee50 req-4642c4d3-0bee-414e-ab29-ce3a1ca09e67 service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Refreshing instance network info cache due to event network-changed-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 732.631762] env[63538]: DEBUG oslo_concurrency.lockutils [req-bc08d60e-8cd1-4188-a557-cfaa304bee50 req-4642c4d3-0bee-414e-ab29-ce3a1ca09e67 service nova] Acquiring lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.631762] env[63538]: DEBUG oslo_concurrency.lockutils [req-bc08d60e-8cd1-4188-a557-cfaa304bee50 req-4642c4d3-0bee-414e-ab29-ce3a1ca09e67 service nova] Acquired lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.631975] env[63538]: DEBUG nova.network.neutron [req-bc08d60e-8cd1-4188-a557-cfaa304bee50 req-4642c4d3-0bee-414e-ab29-ce3a1ca09e67 service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Refreshing network info cache for port 38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 732.749658] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5240a682-fc9f-29fd-2636-e5c07c6be6eb, 'name': SearchDatastore_Task, 'duration_secs': 0.022062} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.750146] env[63538]: DEBUG oslo_concurrency.lockutils [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.750642] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1/fd650fdc-6b49-4051-8267-bbd1f0cb86f1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 732.751081] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38fed89f-25dd-4be1-98eb-659201207b06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.759930] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Waiting for the task: (returnval){ [ 732.759930] env[63538]: value = "task-5100725" [ 732.759930] env[63538]: _type = "Task" [ 732.759930] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.769879] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100725, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.944227] env[63538]: DEBUG nova.compute.utils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 732.948610] env[63538]: DEBUG nova.network.neutron [-] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.950438] env[63538]: DEBUG nova.compute.manager [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 732.950438] env[63538]: DEBUG nova.network.neutron [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 732.978047] env[63538]: DEBUG oslo_concurrency.lockutils [None req-620af94e-cfcb-4b0b-896b-7d5f68f611ce tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "ee9fe572-7a17-46db-8330-4b6f632c6b2c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.189s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.998220] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a4af633-9e65-4e3f-83c1-0da308c8e3d4 req-52243b46-f4fa-413f-95b1-4a1f5d583f15 service nova] Releasing lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.041837] env[63538]: DEBUG nova.policy [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '45e8439118864d019605dc225865e5ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e433921a148d427dbe349fc59afead0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 733.057182] env[63538]: INFO nova.compute.manager [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Shelve offloading [ 733.063117] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Powering off the VM {{(pid=63538) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 733.063439] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-adbc9c45-8208-42af-934f-c0221085cd9f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.072984] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 733.072984] env[63538]: value = "task-5100726" [ 733.072984] env[63538]: _type = "Task" [ 733.072984] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.084086] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100726, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.125938] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Acquiring lock "refresh_cache-080b11d7-a756-45a0-81d5-b5fcc2662ac9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.126113] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Acquired lock "refresh_cache-080b11d7-a756-45a0-81d5-b5fcc2662ac9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.126288] env[63538]: DEBUG nova.network.neutron [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 733.193593] env[63538]: DEBUG nova.compute.manager [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Received event network-vif-plugged-8dcd3b3f-b4cf-4491-b430-9ef54588c908 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 733.193593] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] Acquiring lock "080b11d7-a756-45a0-81d5-b5fcc2662ac9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.194724] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] Lock "080b11d7-a756-45a0-81d5-b5fcc2662ac9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.194724] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 
req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] Lock "080b11d7-a756-45a0-81d5-b5fcc2662ac9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.194724] env[63538]: DEBUG nova.compute.manager [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] No waiting events found dispatching network-vif-plugged-8dcd3b3f-b4cf-4491-b430-9ef54588c908 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 733.194724] env[63538]: WARNING nova.compute.manager [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Received unexpected event network-vif-plugged-8dcd3b3f-b4cf-4491-b430-9ef54588c908 for instance with vm_state building and task_state spawning. [ 733.194724] env[63538]: DEBUG nova.compute.manager [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Received event network-changed-8dcd3b3f-b4cf-4491-b430-9ef54588c908 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 733.194724] env[63538]: DEBUG nova.compute.manager [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Refreshing instance network info cache due to event network-changed-8dcd3b3f-b4cf-4491-b430-9ef54588c908. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 733.194892] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] Acquiring lock "refresh_cache-080b11d7-a756-45a0-81d5-b5fcc2662ac9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.271993] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100725, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490267} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.275046] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1/fd650fdc-6b49-4051-8267-bbd1f0cb86f1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 733.275046] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 733.275046] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1d4add0-05a7-42a1-8bcc-52a236afce81 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.281292] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Waiting for the task: (returnval){ [ 733.281292] env[63538]: value = "task-5100727" [ 733.281292] env[63538]: _type = "Task" [ 733.281292] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.290760] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100727, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.453258] env[63538]: DEBUG nova.compute.manager [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 733.461161] env[63538]: INFO nova.compute.manager [-] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Took 1.60 seconds to deallocate network for instance. 
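The wait_for_task / _poll_task pairs that recur throughout this log (task-5100719, task-5100725, the SearchDatastore_Task session tasks, and so on) all follow the same shape: submit a vSphere task, poll its state and progress until it reports success or error, and record duration_secs on completion. The Python sketch below shows that general poll loop for illustration only; it is not the oslo.vmware code, and get_task_state() is a hypothetical stand-in for the PropertyCollector read the real client performs.

    # Illustrative poll-until-done loop (not oslo.vmware): mirrors the
    # "Waiting for the task ... progress is N% ... completed successfully"
    # pattern in the entries above.
    import time

    def wait_for_task(get_task_state, interval=0.5):
        """Poll a task until it reports success or error, printing progress."""
        start = time.monotonic()
        while True:
            state, progress, error = get_task_state()
            if state == 'success':
                print('completed successfully in %.6fs' % (time.monotonic() - start))
                return
            if state == 'error':
                raise RuntimeError(error)
            print('progress is %s%%' % progress)
            time.sleep(interval)

    # Tiny usage example with a simulated task that finishes on the third poll.
    _states = iter([('running', 0, None), ('running', 99, None), ('success', 100, None)])
    wait_for_task(lambda: next(_states), interval=0.01)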
[ 733.593455] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 733.593856] env[63538]: DEBUG nova.compute.manager [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 733.597408] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7a777d-3c6a-4cb7-bd30-66391e104a5f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.608976] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.609180] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.609360] env[63538]: DEBUG nova.network.neutron [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 733.669414] env[63538]: DEBUG nova.network.neutron [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 733.728143] env[63538]: DEBUG nova.network.neutron [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Successfully created port: 61a2d4d3-83f2-4f4e-8217-ad2abe2975b9 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 733.742980] env[63538]: DEBUG nova.network.neutron [req-bc08d60e-8cd1-4188-a557-cfaa304bee50 req-4642c4d3-0bee-414e-ab29-ce3a1ca09e67 service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Updated VIF entry in instance network info cache for port 38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 733.743351] env[63538]: DEBUG nova.network.neutron [req-bc08d60e-8cd1-4188-a557-cfaa304bee50 req-4642c4d3-0bee-414e-ab29-ce3a1ca09e67 service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Updating instance_info_cache with network_info: [{"id": "38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "address": "fa:16:3e:5f:b1:e3", "network": {"id": "ad5bb52a-b208-49a5-986c-0bfcdffe7d94", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-907734579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5784127fe9d4eefaa1f55f0eacdb91d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38f5393e-f3", "ovs_interfaceid": "38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.794488] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075857} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.796776] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 733.797462] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc674ef-900f-4098-86dc-60d3863846be {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.824094] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1/fd650fdc-6b49-4051-8267-bbd1f0cb86f1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 733.829468] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77739a95-3224-4256-a1eb-70da8ae62b98 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.851618] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Waiting for the task: (returnval){ [ 733.851618] env[63538]: value = "task-5100728" [ 733.851618] env[63538]: _type = "Task" [ 733.851618] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.862207] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100728, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.895476] env[63538]: DEBUG nova.network.neutron [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Updating instance_info_cache with network_info: [{"id": "8dcd3b3f-b4cf-4491-b430-9ef54588c908", "address": "fa:16:3e:8b:fb:d5", "network": {"id": "6a40518f-aee7-4688-b562-66c0a5f77086", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-404633315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8342a170e0f54deea52260c8e741e891", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dcd3b3f-b4", "ovs_interfaceid": "8dcd3b3f-b4cf-4491-b430-9ef54588c908", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.971373] env[63538]: DEBUG oslo_concurrency.lockutils [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.151907] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325cd31c-4f16-4721-9c20-1b6c26b72453 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.161594] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee703ad5-495b-4fe3-bad9-1ac6a7b14da8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.204903] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5751701-5acb-4a67-bd1e-b87fe49e4d12 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.213665] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1bfe86-0544-4cb1-aa08-f638d58b5936 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.230332] env[63538]: DEBUG nova.compute.provider_tree [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.247024] env[63538]: DEBUG oslo_concurrency.lockutils [req-bc08d60e-8cd1-4188-a557-cfaa304bee50 req-4642c4d3-0bee-414e-ab29-ce3a1ca09e67 service nova] Releasing lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.362582] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100728, 'name': ReconfigVM_Task, 'duration_secs': 0.278705} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.362890] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Reconfigured VM instance instance-00000025 to attach disk [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1/fd650fdc-6b49-4051-8267-bbd1f0cb86f1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 734.363586] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73dc94e2-62c5-452f-8b87-533923bb88aa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.372061] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Waiting for the task: (returnval){ [ 734.372061] env[63538]: value = "task-5100729" [ 734.372061] env[63538]: _type = "Task" [ 734.372061] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.382921] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100729, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.398916] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Releasing lock "refresh_cache-080b11d7-a756-45a0-81d5-b5fcc2662ac9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.399295] env[63538]: DEBUG nova.compute.manager [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Instance network_info: |[{"id": "8dcd3b3f-b4cf-4491-b430-9ef54588c908", "address": "fa:16:3e:8b:fb:d5", "network": {"id": "6a40518f-aee7-4688-b562-66c0a5f77086", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-404633315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8342a170e0f54deea52260c8e741e891", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dcd3b3f-b4", "ovs_interfaceid": "8dcd3b3f-b4cf-4491-b430-9ef54588c908", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 734.399633] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] Acquired lock "refresh_cache-080b11d7-a756-45a0-81d5-b5fcc2662ac9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.399846] env[63538]: DEBUG nova.network.neutron [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Refreshing network info cache for port 8dcd3b3f-b4cf-4491-b430-9ef54588c908 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 734.401192] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:fb:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6365036-aa37-44d2-90d1-ca1c3516ded9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8dcd3b3f-b4cf-4491-b430-9ef54588c908', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 734.410643] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 
tempest-ServerAddressesTestJSON-1632524142-project-member] Creating folder: Project (8342a170e0f54deea52260c8e741e891). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 734.415470] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a9cb46d-f337-4db5-90cb-7a4b5c2a7ac5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.427183] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Created folder: Project (8342a170e0f54deea52260c8e741e891) in parent group-v992234. [ 734.427400] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Creating folder: Instances. Parent ref: group-v992337. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 734.427650] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4966f712-34c8-4452-af07-b86b80aaaa88 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.435369] env[63538]: DEBUG nova.network.neutron [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Updating instance_info_cache with network_info: [{"id": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "address": "fa:16:3e:a9:56:1a", "network": {"id": "4fd42fb1-fcc5-467d-88b7-1a5ab3297631", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-9303923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c5e6ed681ed4078bd9115b30f419d9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5dfe48b-4a", "ovs_interfaceid": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.437982] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Created folder: Instances in parent group-v992337. [ 734.438356] env[63538]: DEBUG oslo.service.loopingcall [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 734.438983] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 734.439246] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f1b9977-766d-4a03-9ab8-b9b595e8776a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.466563] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 734.466563] env[63538]: value = "task-5100732" [ 734.466563] env[63538]: _type = "Task" [ 734.466563] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.473464] env[63538]: DEBUG nova.compute.manager [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 734.482634] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100732, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.541950] env[63538]: DEBUG nova.virt.hardware [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 734.542148] env[63538]: DEBUG nova.virt.hardware [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 734.542271] env[63538]: DEBUG nova.virt.hardware [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 734.542464] env[63538]: DEBUG nova.virt.hardware [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 734.542631] env[63538]: DEBUG nova.virt.hardware 
[None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 734.542783] env[63538]: DEBUG nova.virt.hardware [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 734.543199] env[63538]: DEBUG nova.virt.hardware [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 734.543588] env[63538]: DEBUG nova.virt.hardware [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 734.543788] env[63538]: DEBUG nova.virt.hardware [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 734.543998] env[63538]: DEBUG nova.virt.hardware [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 734.544163] env[63538]: DEBUG nova.virt.hardware [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 734.545132] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28033b81-39f2-4c50-9f09-6c7aef0b8d8a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.558754] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feefbef1-0928-444e-8a62-0567f4478803 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.708434] env[63538]: DEBUG nova.network.neutron [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Updated VIF entry in instance network info cache for port 8dcd3b3f-b4cf-4491-b430-9ef54588c908. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 734.709506] env[63538]: DEBUG nova.network.neutron [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Updating instance_info_cache with network_info: [{"id": "8dcd3b3f-b4cf-4491-b430-9ef54588c908", "address": "fa:16:3e:8b:fb:d5", "network": {"id": "6a40518f-aee7-4688-b562-66c0a5f77086", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-404633315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8342a170e0f54deea52260c8e741e891", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dcd3b3f-b4", "ovs_interfaceid": "8dcd3b3f-b4cf-4491-b430-9ef54588c908", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.734407] env[63538]: DEBUG nova.scheduler.client.report [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 734.883023] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100729, 'name': Rename_Task, 'duration_secs': 0.290945} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.883374] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 734.884053] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e484617a-3c19-4095-b52f-3a424bb58bf0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.891109] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Waiting for the task: (returnval){ [ 734.891109] env[63538]: value = "task-5100733" [ 734.891109] env[63538]: _type = "Task" [ 734.891109] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.900419] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100733, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.940284] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Releasing lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.994259] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100732, 'name': CreateVM_Task, 'duration_secs': 0.395165} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.001688] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 735.003615] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.003859] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.005406] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 735.006297] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45848047-2a89-420b-ab3f-b6fe95876fcd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.017115] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Waiting for the task: (returnval){ [ 735.017115] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a2af95-c5fd-4090-1c95-2a4fcf3c747c" [ 735.017115] env[63538]: _type = "Task" [ 735.017115] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.033244] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a2af95-c5fd-4090-1c95-2a4fcf3c747c, 'name': SearchDatastore_Task, 'duration_secs': 0.010241} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.033605] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.033856] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 735.034133] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.034282] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.034521] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.034805] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34d7dff0-afe7-4ca8-a50b-34197fec081d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.046391] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.046500] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 735.049137] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93a51759-73d8-44f7-8791-8657d79d2fe3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.057015] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Waiting for the task: (returnval){ [ 735.057015] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528e5ed1-6371-2aeb-cc80-a025b110ed52" [ 735.057015] env[63538]: _type = "Task" [ 735.057015] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.077284] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528e5ed1-6371-2aeb-cc80-a025b110ed52, 'name': SearchDatastore_Task, 'duration_secs': 0.011427} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.078487] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96ae7fbf-e4f2-41dd-8774-be6ccf22f7c2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.088457] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Waiting for the task: (returnval){ [ 735.088457] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f76be5-f446-9dea-1282-ff2da48bb128" [ 735.088457] env[63538]: _type = "Task" [ 735.088457] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.103800] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f76be5-f446-9dea-1282-ff2da48bb128, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.212836] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a9c718a-b309-4ca8-8da8-e0fd150bb4d6 req-4e6c12ad-5686-4670-a614-b1077e47f896 service nova] Releasing lock "refresh_cache-080b11d7-a756-45a0-81d5-b5fcc2662ac9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.241117] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.806s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.241723] env[63538]: DEBUG nova.compute.manager [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 735.247654] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.955s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.249411] env[63538]: INFO nova.compute.claims [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 735.324887] env[63538]: DEBUG nova.compute.manager [req-aa825526-5716-4a22-a220-cd203ada2cf7 req-44770acd-03fc-4745-b174-757c132dc4d8 service nova] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Received event network-vif-deleted-2357c1a1-6201-44ae-9461-80b6269920a2 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 735.407683] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100733, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.461199] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 735.462260] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efed254d-f860-4bfd-9711-4bb4cc5832c5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.470214] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 735.470613] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1070980-6a9e-4038-8082-0a2b9cf23c14 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.513394] env[63538]: DEBUG nova.compute.manager [req-d1c47b6d-c50c-4121-9cb3-5cfea59a5eee req-197111c0-1e46-43a0-883b-356353732e89 service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Received event network-changed-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 735.513620] env[63538]: DEBUG nova.compute.manager [req-d1c47b6d-c50c-4121-9cb3-5cfea59a5eee req-197111c0-1e46-43a0-883b-356353732e89 service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Refreshing instance network info cache due to event network-changed-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 735.513842] env[63538]: DEBUG oslo_concurrency.lockutils [req-d1c47b6d-c50c-4121-9cb3-5cfea59a5eee req-197111c0-1e46-43a0-883b-356353732e89 service nova] Acquiring lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.513988] env[63538]: DEBUG oslo_concurrency.lockutils [req-d1c47b6d-c50c-4121-9cb3-5cfea59a5eee req-197111c0-1e46-43a0-883b-356353732e89 service nova] Acquired lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.514172] env[63538]: DEBUG nova.network.neutron [req-d1c47b6d-c50c-4121-9cb3-5cfea59a5eee req-197111c0-1e46-43a0-883b-356353732e89 service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Refreshing network info cache for port 38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 735.534570] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 735.534903] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 735.535121] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Deleting the datastore file [datastore2] f9fa5578-acf3-416f-9cb0-8ceb00e5132d {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 735.535393] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d60aa5f-2c9d-4a7a-829d-6bd6c30cc433 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.543615] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 735.543615] env[63538]: value = "task-5100735" [ 735.543615] env[63538]: _type = "Task" [ 735.543615] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.553731] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100735, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.601147] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f76be5-f446-9dea-1282-ff2da48bb128, 'name': SearchDatastore_Task, 'duration_secs': 0.021158} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.601802] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.602503] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 080b11d7-a756-45a0-81d5-b5fcc2662ac9/080b11d7-a756-45a0-81d5-b5fcc2662ac9.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 735.602503] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-def2ab51-4a87-49b4-af0b-d165864b7d38 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.610951] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Waiting for the task: (returnval){ [ 735.610951] env[63538]: value = "task-5100736" [ 735.610951] env[63538]: _type = "Task" [ 735.610951] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.621047] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100736, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.732699] env[63538]: DEBUG nova.network.neutron [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Successfully updated port: 61a2d4d3-83f2-4f4e-8217-ad2abe2975b9 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 735.757605] env[63538]: DEBUG nova.compute.utils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 735.759811] env[63538]: DEBUG nova.compute.manager [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 735.759811] env[63538]: DEBUG nova.network.neutron [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 735.802613] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.826945] env[63538]: DEBUG nova.policy [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4dc985c2a1c842cea5bdea2209137f8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a75dacc8d26c466bb9bd9e8c5d8acbf5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 735.904641] env[63538]: DEBUG oslo_vmware.api [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Task: {'id': task-5100733, 'name': PowerOnVM_Task, 'duration_secs': 0.797663} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.904939] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 735.905140] env[63538]: DEBUG nova.compute.manager [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 735.905954] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7230ac37-1f7f-4620-9763-c6de343ae202 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.059052] env[63538]: DEBUG oslo_vmware.api [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5100735, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170972} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.059052] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 736.059052] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 736.059052] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 736.090353] env[63538]: INFO nova.scheduler.client.report [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Deleted allocations for instance f9fa5578-acf3-416f-9cb0-8ceb00e5132d [ 736.122080] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100736, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.235633] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Acquiring lock "refresh_cache-4e89aa25-fb4a-430d-ab87-feff57b73780" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.235801] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Acquired lock "refresh_cache-4e89aa25-fb4a-430d-ab87-feff57b73780" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.235954] env[63538]: DEBUG nova.network.neutron [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 736.265529] env[63538]: DEBUG nova.compute.manager [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 736.392295] env[63538]: DEBUG nova.network.neutron [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Successfully created port: 27cdea75-ceda-4390-8313-cfbd1681ebd2 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.427979] env[63538]: DEBUG oslo_concurrency.lockutils [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.596012] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.624865] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100736, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516138} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.627647] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 080b11d7-a756-45a0-81d5-b5fcc2662ac9/080b11d7-a756-45a0-81d5-b5fcc2662ac9.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 736.627889] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 736.628368] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b0cb314-f3bb-46ff-a3f1-405813d260b7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.636591] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Waiting for the task: (returnval){ [ 736.636591] env[63538]: value = "task-5100737" [ 736.636591] env[63538]: _type = "Task" [ 736.636591] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.648829] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100737, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.694131] env[63538]: DEBUG nova.network.neutron [req-d1c47b6d-c50c-4121-9cb3-5cfea59a5eee req-197111c0-1e46-43a0-883b-356353732e89 service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Updated VIF entry in instance network info cache for port 38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 736.694553] env[63538]: DEBUG nova.network.neutron [req-d1c47b6d-c50c-4121-9cb3-5cfea59a5eee req-197111c0-1e46-43a0-883b-356353732e89 service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Updating instance_info_cache with network_info: [{"id": "38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "address": "fa:16:3e:5f:b1:e3", "network": {"id": "ad5bb52a-b208-49a5-986c-0bfcdffe7d94", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-907734579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5784127fe9d4eefaa1f55f0eacdb91d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00a15667-7ca5-4dc9-be92-164750d87988", "external-id": "nsx-vlan-transportzone-933", "segmentation_id": 933, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38f5393e-f3", "ovs_interfaceid": "38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.794058] env[63538]: DEBUG nova.network.neutron [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 736.814586] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc1b60a-97fe-429c-a70a-c9697b035e6c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.829967] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2fce51-d6b7-4a45-83b8-a40f3e662062 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.867985] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd607e4d-efb0-493c-8394-f63d17cb3f34 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.877867] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9777a8-8093-47df-88cd-93e50d941045 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.894792] env[63538]: DEBUG nova.compute.provider_tree [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.074914] env[63538]: DEBUG nova.network.neutron [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Updating instance_info_cache with network_info: [{"id": "61a2d4d3-83f2-4f4e-8217-ad2abe2975b9", "address": "fa:16:3e:25:f9:7a", "network": {"id": "70244fa4-bec1-46ff-9f61-a11beef0c837", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-153930308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e433921a148d427dbe349fc59afead0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61a2d4d3-83", "ovs_interfaceid": "61a2d4d3-83f2-4f4e-8217-ad2abe2975b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.147587] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100737, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068704} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.147587] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 737.148267] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9f12ac-23ef-490b-a68c-5e38d589bab4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.171379] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 080b11d7-a756-45a0-81d5-b5fcc2662ac9/080b11d7-a756-45a0-81d5-b5fcc2662ac9.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 737.171725] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd6331e5-763b-4c0a-b194-5f9fb8884b4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.196697] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Waiting for the task: (returnval){ [ 737.196697] env[63538]: value = "task-5100738" [ 737.196697] env[63538]: _type = "Task" [ 737.196697] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.202741] env[63538]: DEBUG oslo_concurrency.lockutils [req-d1c47b6d-c50c-4121-9cb3-5cfea59a5eee req-197111c0-1e46-43a0-883b-356353732e89 service nova] Releasing lock "refresh_cache-dbf48807-08a7-46d1-8454-42437a9f87c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.207572] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100738, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.279037] env[63538]: DEBUG nova.compute.manager [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 737.307674] env[63538]: DEBUG nova.virt.hardware [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 737.307945] env[63538]: DEBUG nova.virt.hardware [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 737.308120] env[63538]: DEBUG nova.virt.hardware [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.308316] env[63538]: DEBUG nova.virt.hardware [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 737.308471] env[63538]: DEBUG nova.virt.hardware [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.308624] env[63538]: DEBUG nova.virt.hardware [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 737.308837] env[63538]: DEBUG nova.virt.hardware [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 737.309058] env[63538]: DEBUG nova.virt.hardware [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 737.309171] env[63538]: DEBUG 
nova.virt.hardware [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 737.309339] env[63538]: DEBUG nova.virt.hardware [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 737.309516] env[63538]: DEBUG nova.virt.hardware [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 737.310438] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e8ff72-956a-4c44-b1c1-f659bc5336df {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.319683] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b393fba-30ac-4cd5-be5c-c8d5cb7b40d4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.398581] env[63538]: DEBUG nova.scheduler.client.report [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 737.577795] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Releasing lock "refresh_cache-4e89aa25-fb4a-430d-ab87-feff57b73780" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.578146] env[63538]: DEBUG nova.compute.manager [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Instance network_info: |[{"id": "61a2d4d3-83f2-4f4e-8217-ad2abe2975b9", "address": "fa:16:3e:25:f9:7a", "network": {"id": "70244fa4-bec1-46ff-9f61-a11beef0c837", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-153930308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"e433921a148d427dbe349fc59afead0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61a2d4d3-83", "ovs_interfaceid": "61a2d4d3-83f2-4f4e-8217-ad2abe2975b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 737.578591] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:f9:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61a2d4d3-83f2-4f4e-8217-ad2abe2975b9', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 737.588615] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Creating folder: Project (e433921a148d427dbe349fc59afead0a). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 737.588914] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cb7289a-59f4-4bec-a1c3-503e7e5a4aed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.601153] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Created folder: Project (e433921a148d427dbe349fc59afead0a) in parent group-v992234. [ 737.601616] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Creating folder: Instances. Parent ref: group-v992340. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 737.601863] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03edba83-9af5-47c6-844e-b86f21a2f4db {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.613273] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Created folder: Instances in parent group-v992340. [ 737.613273] env[63538]: DEBUG oslo.service.loopingcall [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 737.613273] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 737.613273] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d14e0e50-e430-4e9b-822c-afa8eaa7eb5f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.634390] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 737.634390] env[63538]: value = "task-5100741" [ 737.634390] env[63538]: _type = "Task" [ 737.634390] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.642337] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100741, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.709030] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100738, 'name': ReconfigVM_Task, 'duration_secs': 0.275444} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.709330] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 080b11d7-a756-45a0-81d5-b5fcc2662ac9/080b11d7-a756-45a0-81d5-b5fcc2662ac9.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 737.710062] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f01129c-bf81-4294-aea6-10d17de714f1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.718340] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Waiting for the task: (returnval){ [ 737.718340] env[63538]: value = "task-5100742" [ 737.718340] env[63538]: _type = "Task" [ 737.718340] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.728239] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100742, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.810675] env[63538]: DEBUG nova.compute.manager [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Received event network-vif-unplugged-a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 737.811202] env[63538]: DEBUG oslo_concurrency.lockutils [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] Acquiring lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.811202] env[63538]: DEBUG oslo_concurrency.lockutils [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.811302] env[63538]: DEBUG oslo_concurrency.lockutils [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.811541] env[63538]: DEBUG nova.compute.manager [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] No waiting events found dispatching network-vif-unplugged-a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 737.811633] env[63538]: WARNING nova.compute.manager [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Received unexpected event network-vif-unplugged-a5dfe48b-4acc-472c-8e00-f936b4068ea5 for instance with vm_state shelved_offloaded and task_state None. [ 737.811791] env[63538]: DEBUG nova.compute.manager [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Received event network-changed-a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 737.811975] env[63538]: DEBUG nova.compute.manager [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Refreshing instance network info cache due to event network-changed-a5dfe48b-4acc-472c-8e00-f936b4068ea5. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 737.812352] env[63538]: DEBUG oslo_concurrency.lockutils [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] Acquiring lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.812511] env[63538]: DEBUG oslo_concurrency.lockutils [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] Acquired lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.812686] env[63538]: DEBUG nova.network.neutron [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Refreshing network info cache for port a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 737.903760] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.656s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.904236] env[63538]: DEBUG nova.compute.manager [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 737.907750] env[63538]: DEBUG oslo_concurrency.lockutils [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.288s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.908088] env[63538]: DEBUG oslo_concurrency.lockutils [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.910476] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.599s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.912114] env[63538]: INFO nova.compute.claims [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.948528] env[63538]: INFO nova.scheduler.client.report [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Deleted allocations for instance 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff [ 738.146452] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100741, 'name': CreateVM_Task, 'duration_secs': 0.378073} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.146750] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 738.147395] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.147597] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.147964] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 738.148265] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ef1bf32-59fc-4a33-acab-3f5e6efb704b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.154065] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 738.154065] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526ac40a-b5d7-bf6e-99d6-944bf8518808" [ 738.154065] env[63538]: _type = "Task" [ 738.154065] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.162701] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526ac40a-b5d7-bf6e-99d6-944bf8518808, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.231495] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100742, 'name': Rename_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.360251] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquiring lock "fd650fdc-6b49-4051-8267-bbd1f0cb86f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.360251] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Lock "fd650fdc-6b49-4051-8267-bbd1f0cb86f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.360251] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquiring lock "fd650fdc-6b49-4051-8267-bbd1f0cb86f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.360251] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Lock "fd650fdc-6b49-4051-8267-bbd1f0cb86f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.360251] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Lock "fd650fdc-6b49-4051-8267-bbd1f0cb86f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.364336] env[63538]: INFO nova.compute.manager [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Terminating instance [ 738.370570] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquiring lock "refresh_cache-fd650fdc-6b49-4051-8267-bbd1f0cb86f1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.370570] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquired lock "refresh_cache-fd650fdc-6b49-4051-8267-bbd1f0cb86f1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.370570] env[63538]: DEBUG nova.network.neutron [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 
tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 738.418988] env[63538]: DEBUG nova.compute.utils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 738.423823] env[63538]: DEBUG nova.compute.manager [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 738.424262] env[63538]: DEBUG nova.network.neutron [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 738.458895] env[63538]: DEBUG oslo_concurrency.lockutils [None req-01f74780-a9e8-4a80-974f-f683675ce3e8 tempest-ServerAddressesNegativeTestJSON-348446036 tempest-ServerAddressesNegativeTestJSON-348446036-project-member] Lock "4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.484s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.497491] env[63538]: DEBUG nova.policy [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f91d4ce5a5724fb7b785591ae831506d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c1f0c999ede418c866074d9276050ff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 738.664400] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526ac40a-b5d7-bf6e-99d6-944bf8518808, 'name': SearchDatastore_Task, 'duration_secs': 0.010776} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.665731] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.666144] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 738.666588] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.666929] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.668055] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 738.668055] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88389f9d-6b7c-4f9a-92c9-9d5797ead2ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.681148] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 738.681148] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 738.681148] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be042ae8-6581-4a2b-8d93-0a54d0ace444 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.691050] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 738.691050] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a876d8-419e-60a8-2147-fcedacf7dbac" [ 738.691050] env[63538]: _type = "Task" [ 738.691050] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.699921] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a876d8-419e-60a8-2147-fcedacf7dbac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.732854] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100742, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.765953] env[63538]: DEBUG nova.network.neutron [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Successfully updated port: 27cdea75-ceda-4390-8313-cfbd1681ebd2 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 738.834736] env[63538]: DEBUG nova.network.neutron [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Updated VIF entry in instance network info cache for port a5dfe48b-4acc-472c-8e00-f936b4068ea5. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 738.836592] env[63538]: DEBUG nova.network.neutron [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Updating instance_info_cache with network_info: [{"id": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "address": "fa:16:3e:a9:56:1a", "network": {"id": "4fd42fb1-fcc5-467d-88b7-1a5ab3297631", "bridge": null, "label": "tempest-ServersNegativeTestJSON-9303923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c5e6ed681ed4078bd9115b30f419d9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapa5dfe48b-4a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.900906] env[63538]: DEBUG nova.network.neutron [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 738.923306] env[63538]: DEBUG nova.compute.manager [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 738.937035] env[63538]: DEBUG nova.network.neutron [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Successfully created port: facecb08-5587-4113-9009-ad339833d9ab {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 738.939372] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "65fc18ff-8901-40d2-8a5b-640eb9768240" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.939975] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "65fc18ff-8901-40d2-8a5b-640eb9768240" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.939975] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "65fc18ff-8901-40d2-8a5b-640eb9768240-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.941188] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "65fc18ff-8901-40d2-8a5b-640eb9768240-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.941188] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "65fc18ff-8901-40d2-8a5b-640eb9768240-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.943835] env[63538]: INFO nova.compute.manager [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Terminating instance [ 738.945975] env[63538]: DEBUG nova.compute.manager [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 738.946308] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 738.947209] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90ba783-3ab3-4756-a037-38805e06aa44 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.958092] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 738.958354] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d13a1829-713d-4f08-a708-e4d00e6221ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.967805] env[63538]: DEBUG oslo_vmware.api [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 738.967805] env[63538]: value = "task-5100743" [ 738.967805] env[63538]: _type = "Task" [ 738.967805] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.980293] env[63538]: DEBUG oslo_vmware.api [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100743, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.986098] env[63538]: DEBUG nova.network.neutron [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.096634] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.202820] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a876d8-419e-60a8-2147-fcedacf7dbac, 'name': SearchDatastore_Task, 'duration_secs': 0.013508} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.203679] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e52a2f1-3f61-472b-8841-9104d4b45c20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.210568] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 739.210568] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52356431-3b1a-1255-e0e3-0a887982c677" [ 739.210568] env[63538]: _type = "Task" [ 739.210568] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.219630] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52356431-3b1a-1255-e0e3-0a887982c677, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.231297] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100742, 'name': Rename_Task, 'duration_secs': 1.128352} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.231807] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 739.232112] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b091d724-af4c-4f8c-b389-68a18ac39bc4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.242410] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Waiting for the task: (returnval){ [ 739.242410] env[63538]: value = "task-5100744" [ 739.242410] env[63538]: _type = "Task" [ 739.242410] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.252161] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100744, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.272243] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Acquiring lock "refresh_cache-736b110e-7265-42cc-9c9b-35f57c466b0c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.272243] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Acquired lock "refresh_cache-736b110e-7265-42cc-9c9b-35f57c466b0c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.272243] env[63538]: DEBUG nova.network.neutron [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 739.340123] env[63538]: DEBUG oslo_concurrency.lockutils [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] Releasing lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.340616] env[63538]: DEBUG nova.compute.manager [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Received event network-vif-plugged-61a2d4d3-83f2-4f4e-8217-ad2abe2975b9 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 739.341803] env[63538]: DEBUG oslo_concurrency.lockutils [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] Acquiring lock "4e89aa25-fb4a-430d-ab87-feff57b73780-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.341803] env[63538]: DEBUG oslo_concurrency.lockutils [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] Lock "4e89aa25-fb4a-430d-ab87-feff57b73780-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.341803] env[63538]: DEBUG oslo_concurrency.lockutils [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] Lock "4e89aa25-fb4a-430d-ab87-feff57b73780-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.341803] env[63538]: DEBUG nova.compute.manager [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] No waiting events found dispatching network-vif-plugged-61a2d4d3-83f2-4f4e-8217-ad2abe2975b9 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 739.341803] env[63538]: WARNING 
nova.compute.manager [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Received unexpected event network-vif-plugged-61a2d4d3-83f2-4f4e-8217-ad2abe2975b9 for instance with vm_state building and task_state spawning. [ 739.341803] env[63538]: DEBUG nova.compute.manager [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Received event network-changed-61a2d4d3-83f2-4f4e-8217-ad2abe2975b9 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 739.342105] env[63538]: DEBUG nova.compute.manager [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Refreshing instance network info cache due to event network-changed-61a2d4d3-83f2-4f4e-8217-ad2abe2975b9. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 739.342105] env[63538]: DEBUG oslo_concurrency.lockutils [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] Acquiring lock "refresh_cache-4e89aa25-fb4a-430d-ab87-feff57b73780" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.342559] env[63538]: DEBUG oslo_concurrency.lockutils [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] Acquired lock "refresh_cache-4e89aa25-fb4a-430d-ab87-feff57b73780" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.342559] env[63538]: DEBUG nova.network.neutron [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Refreshing network info cache for port 61a2d4d3-83f2-4f4e-8217-ad2abe2975b9 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 739.359239] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "5421e135-9581-4f81-aa8a-2a604887a1df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.359604] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "5421e135-9581-4f81-aa8a-2a604887a1df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.359820] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "5421e135-9581-4f81-aa8a-2a604887a1df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.362383] env[63538]: DEBUG oslo_concurrency.lockutils [None 
req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "5421e135-9581-4f81-aa8a-2a604887a1df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.002s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.362577] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "5421e135-9581-4f81-aa8a-2a604887a1df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.368269] env[63538]: INFO nova.compute.manager [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Terminating instance [ 739.370518] env[63538]: DEBUG nova.compute.manager [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 739.370766] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 739.371792] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b88be0-f032-4073-a8e9-3cec78494819 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.380896] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 739.381190] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83b2f6c9-a826-4184-9e5b-9ecc88289fa1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.390556] env[63538]: DEBUG oslo_vmware.api [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 739.390556] env[63538]: value = "task-5100745" [ 739.390556] env[63538]: _type = "Task" [ 739.390556] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.406151] env[63538]: DEBUG oslo_vmware.api [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100745, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.478401] env[63538]: DEBUG oslo_vmware.api [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100743, 'name': PowerOffVM_Task, 'duration_secs': 0.215203} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.481786] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 739.481786] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 739.482499] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5fadfab9-a9d8-4aac-86fa-70c2bb0ebfb1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.490627] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Releasing lock "refresh_cache-fd650fdc-6b49-4051-8267-bbd1f0cb86f1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.491641] env[63538]: DEBUG nova.compute.manager [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 739.491882] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 739.494411] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4273a699-60a7-408a-a054-c92809fd6aa2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.506395] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 739.506819] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-202ca0d7-baee-41ec-a530-d638e4bca0b3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.516463] env[63538]: DEBUG oslo_vmware.api [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 739.516463] env[63538]: value = "task-5100747" [ 739.516463] env[63538]: _type = "Task" [ 739.516463] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.530545] env[63538]: DEBUG oslo_vmware.api [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100747, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.568682] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0a46e6-cebc-4505-8545-abc0929652c4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.574253] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 739.574552] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 739.574906] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Deleting the datastore file [datastore1] 65fc18ff-8901-40d2-8a5b-640eb9768240 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 739.575818] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-068e3b22-57f9-4b55-81e8-94761ee66aef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.581886] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac7abf5-387f-4636-94de-65751b6adafb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.588352] env[63538]: DEBUG oslo_vmware.api [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 739.588352] env[63538]: value = "task-5100748" [ 739.588352] env[63538]: _type = "Task" [ 739.588352] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.631533] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14175846-d1d1-4bee-8a35-28dc93765087 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.637886] env[63538]: DEBUG oslo_vmware.api [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100748, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.648023] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e67a37a-7da5-4399-895c-bea1709a1354 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.661230] env[63538]: DEBUG nova.compute.provider_tree [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.722170] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52356431-3b1a-1255-e0e3-0a887982c677, 'name': SearchDatastore_Task, 'duration_secs': 0.00937} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.722508] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.722737] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 4e89aa25-fb4a-430d-ab87-feff57b73780/4e89aa25-fb4a-430d-ab87-feff57b73780.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 739.723046] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cff43fb1-b65d-4fb0-b2e9-1765a20fd7e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.731043] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 739.731043] env[63538]: value = "task-5100749" [ 739.731043] env[63538]: _type = "Task" [ 739.731043] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.740723] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100749, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.752449] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100744, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.847114] env[63538]: DEBUG nova.network.neutron [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 739.901470] env[63538]: DEBUG oslo_vmware.api [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100745, 'name': PowerOffVM_Task, 'duration_secs': 0.239673} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.901470] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 739.901470] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 739.901470] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bca89004-fb99-4802-a363-d26967c2d7f3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.936056] env[63538]: DEBUG nova.compute.manager [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 739.966149] env[63538]: DEBUG nova.virt.hardware [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 739.967101] env[63538]: DEBUG nova.virt.hardware [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 739.967347] env[63538]: DEBUG nova.virt.hardware [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 739.967474] env[63538]: DEBUG nova.virt.hardware [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 739.967580] env[63538]: DEBUG nova.virt.hardware [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 739.967760] env[63538]: DEBUG nova.virt.hardware [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 739.967994] env[63538]: DEBUG nova.virt.hardware [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 739.968179] env[63538]: DEBUG nova.virt.hardware [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 739.968358] env[63538]: DEBUG nova.virt.hardware [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 
tempest-ServersAdminTestJSON-5453135-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 739.968555] env[63538]: DEBUG nova.virt.hardware [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 739.968700] env[63538]: DEBUG nova.virt.hardware [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 739.969638] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51acf1bc-f8cc-464b-ba30-3913091a5445 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.982046] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9b2df9-8631-447d-92f4-a0f8a3fd8641 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.987592] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 739.988142] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 739.988142] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Deleting the datastore file [datastore2] 5421e135-9581-4f81-aa8a-2a604887a1df {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 739.988953] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-477717a3-4227-4331-bfc4-ad21d9730233 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.004105] env[63538]: DEBUG oslo_vmware.api [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for the task: (returnval){ [ 740.004105] env[63538]: value = "task-5100751" [ 740.004105] env[63538]: _type = "Task" [ 740.004105] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.013163] env[63538]: DEBUG oslo_vmware.api [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100751, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.029366] env[63538]: DEBUG oslo_vmware.api [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100747, 'name': PowerOffVM_Task, 'duration_secs': 0.145088} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.029641] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 740.029739] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 740.030091] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80fa7c3d-36a4-4c96-9ade-037f13af5496 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.070766] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 740.071540] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 740.071802] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Deleting the datastore file [datastore1] fd650fdc-6b49-4051-8267-bbd1f0cb86f1 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 740.072213] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf62abb1-02b2-4601-9c30-7698fc062f0e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.080816] env[63538]: DEBUG oslo_vmware.api [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for the task: (returnval){ [ 740.080816] env[63538]: value = "task-5100753" [ 740.080816] env[63538]: _type = "Task" [ 740.080816] env[63538]: } 
to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.100380] env[63538]: DEBUG oslo_vmware.api [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100753, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.112150] env[63538]: DEBUG oslo_vmware.api [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100748, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210561} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.112150] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 740.112150] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 740.112150] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 740.112150] env[63538]: INFO nova.compute.manager [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Took 1.16 seconds to destroy the instance on the hypervisor. [ 740.112150] env[63538]: DEBUG oslo.service.loopingcall [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.112150] env[63538]: DEBUG nova.compute.manager [-] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 740.112150] env[63538]: DEBUG nova.network.neutron [-] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 740.166020] env[63538]: DEBUG nova.scheduler.client.report [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 740.243426] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100749, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.254754] env[63538]: DEBUG oslo_vmware.api [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100744, 'name': PowerOnVM_Task, 'duration_secs': 0.528315} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.255170] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 740.255306] env[63538]: INFO nova.compute.manager [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Took 8.58 seconds to spawn the instance on the hypervisor. 
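Several of the records above repeat the same shape: an oslo_vmware.api call logs "Waiting for the task: (returnval){ value = "task-..." _type = "Task" } to complete.", then one or more "progress is N%." lines, and finally "completed successfully" with a duration_secs. The Python sketch below only illustrates that polling shape for readers following the log; VMwareTask and fetch_progress() are hypothetical stand-ins, not the real oslo.vmware or vSphere task API.

    # Minimal sketch of the "wait for task ... progress ... completed successfully"
    # pattern seen in the records above. VMwareTask and fetch_progress() are
    # hypothetical stand-ins, not the real oslo.vmware / pyVmomi API.
    import time
    from dataclasses import dataclass


    @dataclass
    class VMwareTask:
        task_id: str          # e.g. "task-5100749"
        name: str             # e.g. "CopyVirtualDisk_Task"
        _progress: int = 0

        def fetch_progress(self) -> int:
            # Placeholder: a real implementation would read the task state
            # from vCenter; here progress just advances on every poll.
            self._progress = min(self._progress + 50, 100)
            return self._progress


    def wait_for_task(task: VMwareTask, poll_interval: float = 0.5) -> float:
        """Poll a task until it reports 100%, returning the elapsed seconds."""
        start = time.monotonic()
        while True:
            progress = task.fetch_progress()
            print(f"Task: {{'id': {task.task_id!r}, 'name': {task.name!r}}} "
                  f"progress is {progress}%.")
            if progress >= 100:
                duration = time.monotonic() - start
                print(f"Task {task.task_id} completed successfully "
                      f"(duration_secs {duration:.3f}).")
                return duration
            time.sleep(poll_interval)


    if __name__ == "__main__":
        wait_for_task(VMwareTask("task-5100749", "CopyVirtualDisk_Task"))

The real service interleaves many such polls from different request contexts, which is why progress lines for several task IDs appear shuffled together in the log above.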
[ 740.255650] env[63538]: DEBUG nova.compute.manager [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 740.256833] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e218dd9-d032-4401-8586-aecf741cb6f8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.325656] env[63538]: DEBUG nova.compute.manager [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Received event network-vif-plugged-27cdea75-ceda-4390-8313-cfbd1681ebd2 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 740.325943] env[63538]: DEBUG oslo_concurrency.lockutils [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] Acquiring lock "736b110e-7265-42cc-9c9b-35f57c466b0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.326314] env[63538]: DEBUG oslo_concurrency.lockutils [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] Lock "736b110e-7265-42cc-9c9b-35f57c466b0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.326512] env[63538]: DEBUG oslo_concurrency.lockutils [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] Lock "736b110e-7265-42cc-9c9b-35f57c466b0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.329603] env[63538]: DEBUG nova.compute.manager [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] No waiting events found dispatching network-vif-plugged-27cdea75-ceda-4390-8313-cfbd1681ebd2 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 740.329603] env[63538]: WARNING nova.compute.manager [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Received unexpected event network-vif-plugged-27cdea75-ceda-4390-8313-cfbd1681ebd2 for instance with vm_state building and task_state spawning. 
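The records just above trace Nova's external-event handling: a lock named "<instance-uuid>-events" is acquired, pop_instance_event looks for a registered waiter for the incoming network-vif-plugged event, and when none exists the service emits the WARNING about an unexpected event for an instance still in vm_state building. The sketch below mimics that flow with simplified stand-ins (threading.Lock instead of oslo_concurrency.lockutils, an in-memory waiter table); it is not Nova's actual InstanceEvents implementation.

    # Simplified illustration of the per-instance event dispatch visible above.
    # Names and signatures are stand-ins, not Nova's real code.
    import threading
    from collections import defaultdict


    class InstanceEvents:
        """Track events that callers have registered an interest in, per instance."""

        def __init__(self):
            self._lock = threading.Lock()      # stands in for the "<uuid>-events" lock
            self._waiters = defaultdict(dict)  # uuid -> {event_name: threading.Event}

        def prepare_for_event(self, instance_uuid: str, event_name: str) -> threading.Event:
            """Register interest in an event before starting the operation."""
            with self._lock:
                waiter = threading.Event()
                self._waiters[instance_uuid][event_name] = waiter
                return waiter

        def pop_instance_event(self, instance_uuid: str, event_name: str):
            """Return and remove the waiter for an incoming event, if any."""
            with self._lock:
                return self._waiters[instance_uuid].pop(event_name, None)


    def handle_external_event(events: InstanceEvents, instance_uuid: str, event_name: str):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # Mirrors the WARNING in the log: nobody was waiting for this event yet.
            print(f"WARNING: received unexpected event {event_name} "
                  f"for instance {instance_uuid}")
        else:
            waiter.set()


    if __name__ == "__main__":
        ev = InstanceEvents()
        handle_external_event(ev, "4e89aa25-fb4a-430d-ab87-feff57b73780",
                              "network-vif-plugged-61a2d4d3")

In the log this situation is benign: the spawn path had not yet registered a waiter for the port, so the event is only logged and the network info cache is refreshed instead.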
[ 740.329603] env[63538]: DEBUG nova.compute.manager [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Received event network-changed-27cdea75-ceda-4390-8313-cfbd1681ebd2 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 740.329603] env[63538]: DEBUG nova.compute.manager [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Refreshing instance network info cache due to event network-changed-27cdea75-ceda-4390-8313-cfbd1681ebd2. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 740.329603] env[63538]: DEBUG oslo_concurrency.lockutils [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] Acquiring lock "refresh_cache-736b110e-7265-42cc-9c9b-35f57c466b0c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.348362] env[63538]: DEBUG nova.network.neutron [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Updating instance_info_cache with network_info: [{"id": "27cdea75-ceda-4390-8313-cfbd1681ebd2", "address": "fa:16:3e:b7:0c:84", "network": {"id": "4f44aa50-ad3b-4530-bd6b-0adbebed9d1d", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-2084769802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a75dacc8d26c466bb9bd9e8c5d8acbf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27cdea75-ce", "ovs_interfaceid": "27cdea75-ceda-4390-8313-cfbd1681ebd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.500433] env[63538]: DEBUG nova.network.neutron [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Updated VIF entry in instance network info cache for port 61a2d4d3-83f2-4f4e-8217-ad2abe2975b9. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 740.501078] env[63538]: DEBUG nova.network.neutron [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Updating instance_info_cache with network_info: [{"id": "61a2d4d3-83f2-4f4e-8217-ad2abe2975b9", "address": "fa:16:3e:25:f9:7a", "network": {"id": "70244fa4-bec1-46ff-9f61-a11beef0c837", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-153930308-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e433921a148d427dbe349fc59afead0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61a2d4d3-83", "ovs_interfaceid": "61a2d4d3-83f2-4f4e-8217-ad2abe2975b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.521417] env[63538]: DEBUG oslo_vmware.api [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100751, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.595046] env[63538]: DEBUG oslo_vmware.api [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Task: {'id': task-5100753, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.458272} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.595330] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 740.595521] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 740.595723] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 740.595887] env[63538]: INFO nova.compute.manager [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Took 1.10 seconds to destroy the instance on the hypervisor. [ 740.596152] env[63538]: DEBUG oslo.service.loopingcall [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.596353] env[63538]: DEBUG nova.compute.manager [-] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 740.596449] env[63538]: DEBUG nova.network.neutron [-] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 740.628992] env[63538]: DEBUG nova.network.neutron [-] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.671433] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.761s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.674024] env[63538]: DEBUG nova.compute.manager [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 740.675608] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.167s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.677344] env[63538]: INFO nova.compute.claims [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 740.742504] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100749, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.65962} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.743272] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 4e89aa25-fb4a-430d-ab87-feff57b73780/4e89aa25-fb4a-430d-ab87-feff57b73780.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 740.745204] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 740.745204] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e5915977-caf2-47e2-a74e-b25adee86301 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.758044] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 740.758044] env[63538]: value = "task-5100754" [ 740.758044] env[63538]: _type = "Task" [ 740.758044] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.773631] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100754, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.779300] env[63538]: INFO nova.compute.manager [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Took 60.37 seconds to build instance. [ 740.782498] env[63538]: DEBUG nova.compute.manager [req-defe7bea-3916-4c68-ba51-9fa5cb343a3b req-fdbbf982-d568-441d-b0a2-e1e02e56839e service nova] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Received event network-vif-deleted-0be458b1-bd73-4d0f-8fd8-bcfec3c520c7 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 740.782884] env[63538]: INFO nova.compute.manager [req-defe7bea-3916-4c68-ba51-9fa5cb343a3b req-fdbbf982-d568-441d-b0a2-e1e02e56839e service nova] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Neutron deleted interface 0be458b1-bd73-4d0f-8fd8-bcfec3c520c7; detaching it from the instance and deleting it from the info cache [ 740.783238] env[63538]: DEBUG nova.network.neutron [req-defe7bea-3916-4c68-ba51-9fa5cb343a3b req-fdbbf982-d568-441d-b0a2-e1e02e56839e service nova] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.851598] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Releasing lock "refresh_cache-736b110e-7265-42cc-9c9b-35f57c466b0c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.851979] env[63538]: DEBUG nova.compute.manager [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Instance network_info: |[{"id": "27cdea75-ceda-4390-8313-cfbd1681ebd2", "address": "fa:16:3e:b7:0c:84", "network": {"id": "4f44aa50-ad3b-4530-bd6b-0adbebed9d1d", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-2084769802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a75dacc8d26c466bb9bd9e8c5d8acbf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27cdea75-ce", "ovs_interfaceid": "27cdea75-ceda-4390-8313-cfbd1681ebd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 740.852323] env[63538]: DEBUG oslo_concurrency.lockutils [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] Acquired lock "refresh_cache-736b110e-7265-42cc-9c9b-35f57c466b0c" 
{{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.852512] env[63538]: DEBUG nova.network.neutron [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Refreshing network info cache for port 27cdea75-ceda-4390-8313-cfbd1681ebd2 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 740.854134] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:0c:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee9ce73d-4ee8-4b28-b7d3-3a5735039627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27cdea75-ceda-4390-8313-cfbd1681ebd2', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.863638] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Creating folder: Project (a75dacc8d26c466bb9bd9e8c5d8acbf5). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 740.865721] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2485c3c5-3850-4ea9-80c2-b911e8a101e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.878601] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Created folder: Project (a75dacc8d26c466bb9bd9e8c5d8acbf5) in parent group-v992234. [ 740.878907] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Creating folder: Instances. Parent ref: group-v992343. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 740.879162] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78d639f4-1723-4945-b334-aeb2f6236898 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.890671] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Created folder: Instances in parent group-v992343. [ 740.891264] env[63538]: DEBUG oslo.service.loopingcall [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.891264] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 740.891467] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75ece3c7-2c0e-4244-ab89-1e346e7a64b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.916476] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.916476] env[63538]: value = "task-5100757" [ 740.916476] env[63538]: _type = "Task" [ 740.916476] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.926413] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100757, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.927851] env[63538]: DEBUG nova.network.neutron [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Successfully updated port: facecb08-5587-4113-9009-ad339833d9ab {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 740.957156] env[63538]: DEBUG nova.network.neutron [-] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.005070] env[63538]: DEBUG oslo_concurrency.lockutils [req-4723fc3f-3a54-4ae2-846d-7a64bc269c56 req-17938dbc-9e11-4f7d-8495-2111026baa2e service nova] Releasing lock "refresh_cache-4e89aa25-fb4a-430d-ab87-feff57b73780" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.019143] env[63538]: DEBUG oslo_vmware.api [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Task: {'id': task-5100751, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.54423} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.019143] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 741.019143] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 741.019143] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 741.019365] env[63538]: INFO nova.compute.manager [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Took 1.65 seconds to destroy the instance on the hypervisor. [ 741.019479] env[63538]: DEBUG oslo.service.loopingcall [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 741.019672] env[63538]: DEBUG nova.compute.manager [-] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 741.019763] env[63538]: DEBUG nova.network.neutron [-] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 741.131654] env[63538]: DEBUG nova.network.neutron [-] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.182948] env[63538]: DEBUG nova.compute.utils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 741.189336] env[63538]: DEBUG nova.compute.manager [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 741.189336] env[63538]: DEBUG nova.network.neutron [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 741.236732] env[63538]: DEBUG nova.policy [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87c19c9ce3594acd96c1c215ef8ea555', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '422f50dc66ec48b7b262643390072f3d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 741.268212] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100754, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069726} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.269296] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 741.270601] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade045dd-c3d4-435e-bd48-38788da0f72f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.287428] env[63538]: DEBUG oslo_concurrency.lockutils [None req-490d14a3-4a53-4959-897e-0567e4b42be9 tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Lock "080b11d7-a756-45a0-81d5-b5fcc2662ac9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.331s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.298847] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 4e89aa25-fb4a-430d-ab87-feff57b73780/4e89aa25-fb4a-430d-ab87-feff57b73780.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 741.299454] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59bc652c-d980-40df-afb9-17c912364467 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.302866] env[63538]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da59bfb7-8f7c-4f42-9ec8-b8e03c5ec4ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.333271] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3edb6bf2-db86-47e1-94f5-57b216a29f3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.350568] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 741.350568] env[63538]: value = "task-5100758" [ 741.350568] env[63538]: _type = "Task" [ 741.350568] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.363619] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100758, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.383140] env[63538]: DEBUG nova.compute.manager [req-defe7bea-3916-4c68-ba51-9fa5cb343a3b req-fdbbf982-d568-441d-b0a2-e1e02e56839e service nova] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Detach interface failed, port_id=0be458b1-bd73-4d0f-8fd8-bcfec3c520c7, reason: Instance 65fc18ff-8901-40d2-8a5b-640eb9768240 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 741.431595] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100757, 'name': CreateVM_Task, 'duration_secs': 0.362074} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.431595] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 741.431595] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.431595] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.431993] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 741.434346] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "refresh_cache-e50e95c0-830b-4d71-999b-546b138bf8f4" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.434346] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "refresh_cache-e50e95c0-830b-4d71-999b-546b138bf8f4" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.434346] env[63538]: DEBUG nova.network.neutron [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 741.437417] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70e9bdf6-2165-4e54-9554-0ee7d5afe9ad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.445479] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Waiting for the task: (returnval){ [ 741.445479] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527668e0-97ce-e216-d437-a19695555a12" [ 741.445479] env[63538]: _type = "Task" [ 741.445479] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.457354] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527668e0-97ce-e216-d437-a19695555a12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.460628] env[63538]: INFO nova.compute.manager [-] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Took 1.35 seconds to deallocate network for instance. [ 741.634642] env[63538]: INFO nova.compute.manager [-] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Took 1.04 seconds to deallocate network for instance. [ 741.688475] env[63538]: DEBUG nova.compute.manager [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 741.695444] env[63538]: DEBUG nova.network.neutron [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Successfully created port: c2833403-d523-4069-b5a5-778e92138ff9 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 741.824773] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 741.867346] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100758, 'name': ReconfigVM_Task, 'duration_secs': 0.299462} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.867346] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 4e89aa25-fb4a-430d-ab87-feff57b73780/4e89aa25-fb4a-430d-ab87-feff57b73780.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 741.867805] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a013e42-4324-4b85-927e-70a73ff34517 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.877017] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 741.877017] env[63538]: value = "task-5100759" [ 741.877017] env[63538]: _type = "Task" [ 741.877017] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.892572] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100759, 'name': Rename_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.961251] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527668e0-97ce-e216-d437-a19695555a12, 'name': SearchDatastore_Task, 'duration_secs': 0.020717} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.965548] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.966077] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 741.967032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.967032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.967032] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 741.971450] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.971776] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-f72a9802-c365-4cad-adc8-2890d346e38f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.984496] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 741.984743] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 741.985986] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a89d3f80-df6e-4363-a9d7-57a60d9cd0a1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.995627] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Waiting for the task: (returnval){ [ 741.995627] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522abb9e-138d-9ac2-3722-f96aa0bd1dca" [ 741.995627] env[63538]: _type = "Task" [ 741.995627] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.007930] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522abb9e-138d-9ac2-3722-f96aa0bd1dca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.037835] env[63538]: DEBUG nova.network.neutron [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 742.143815] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.203482] env[63538]: DEBUG nova.network.neutron [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Updated VIF entry in instance network info cache for port 27cdea75-ceda-4390-8313-cfbd1681ebd2. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 742.203723] env[63538]: DEBUG nova.network.neutron [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Updating instance_info_cache with network_info: [{"id": "27cdea75-ceda-4390-8313-cfbd1681ebd2", "address": "fa:16:3e:b7:0c:84", "network": {"id": "4f44aa50-ad3b-4530-bd6b-0adbebed9d1d", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-2084769802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a75dacc8d26c466bb9bd9e8c5d8acbf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27cdea75-ce", "ovs_interfaceid": "27cdea75-ceda-4390-8313-cfbd1681ebd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.213857] env[63538]: DEBUG nova.network.neutron [-] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.328236] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f1fd90-d610-4960-adb5-c87f355e87e4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.341777] env[63538]: DEBUG nova.network.neutron [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Updating instance_info_cache with network_info: [{"id": "facecb08-5587-4113-9009-ad339833d9ab", "address": "fa:16:3e:d8:f0:f8", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfacecb08-55", "ovs_interfaceid": "facecb08-5587-4113-9009-ad339833d9ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.343852] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f20194b-fd44-4c9b-9c1a-a7a338cd37ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.385920] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.389725] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cdab7c2-7e7e-49b4-92cc-1c7d368b5190 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.400937] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100759, 'name': Rename_Task, 'duration_secs': 0.200096} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.401607] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 742.402910] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c322fe-53d7-436f-80af-f9490828fe5a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.406842] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e0c3f92-df79-4075-a3b4-2faffbba70dd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.421684] env[63538]: DEBUG nova.compute.provider_tree [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.424276] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 742.424276] env[63538]: value = "task-5100760" [ 742.424276] env[63538]: _type = "Task" [ 742.424276] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.507598] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522abb9e-138d-9ac2-3722-f96aa0bd1dca, 'name': SearchDatastore_Task, 'duration_secs': 0.019788} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.509317] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83e35043-6b6f-47d9-bf43-ebb1d6db6470 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.517276] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Waiting for the task: (returnval){ [ 742.517276] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b85a59-da33-15ae-be56-9a5206be4630" [ 742.517276] env[63538]: _type = "Task" [ 742.517276] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.529466] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b85a59-da33-15ae-be56-9a5206be4630, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.704202] env[63538]: DEBUG nova.compute.manager [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 742.708257] env[63538]: DEBUG oslo_concurrency.lockutils [req-8fef230d-a651-4cd5-b20a-0d3e4605f977 req-af6422b5-6809-4ae7-91d6-5823ba3287d0 service nova] Releasing lock "refresh_cache-736b110e-7265-42cc-9c9b-35f57c466b0c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.721157] env[63538]: INFO nova.compute.manager [-] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Took 1.70 seconds to deallocate network for instance. 
[ 742.738425] env[63538]: DEBUG nova.virt.hardware [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 742.738717] env[63538]: DEBUG nova.virt.hardware [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 742.739116] env[63538]: DEBUG nova.virt.hardware [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.739211] env[63538]: DEBUG nova.virt.hardware [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 742.739287] env[63538]: DEBUG nova.virt.hardware [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.739440] env[63538]: DEBUG nova.virt.hardware [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 742.741897] env[63538]: DEBUG nova.virt.hardware [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 742.741897] env[63538]: DEBUG nova.virt.hardware [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 742.741897] env[63538]: DEBUG nova.virt.hardware [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 742.741897] env[63538]: DEBUG nova.virt.hardware [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 742.741897] env[63538]: DEBUG nova.virt.hardware [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 742.741897] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4a2bde-da81-400a-953b-9821161e421e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.750876] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdaf9182-d672-4eab-b9e3-cfe6dab9b275 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.849285] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "refresh_cache-e50e95c0-830b-4d71-999b-546b138bf8f4" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.849622] env[63538]: DEBUG nova.compute.manager [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Instance network_info: |[{"id": "facecb08-5587-4113-9009-ad339833d9ab", "address": "fa:16:3e:d8:f0:f8", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfacecb08-55", "ovs_interfaceid": "facecb08-5587-4113-9009-ad339833d9ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 742.850318] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:f0:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'6edb8eae-1113-49d0-84f7-9fd9f82b26fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'facecb08-5587-4113-9009-ad339833d9ab', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 742.861890] env[63538]: DEBUG oslo.service.loopingcall [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 742.862608] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 742.862878] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9b109f2-74fd-4a43-a0ff-aa26e6f0ea10 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.894424] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 742.894424] env[63538]: value = "task-5100761" [ 742.894424] env[63538]: _type = "Task" [ 742.894424] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.907022] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100761, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.926651] env[63538]: DEBUG nova.scheduler.client.report [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 742.943286] env[63538]: DEBUG oslo_vmware.api [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100760, 'name': PowerOnVM_Task, 'duration_secs': 0.491821} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.943753] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 742.944742] env[63538]: INFO nova.compute.manager [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Took 8.47 seconds to spawn the instance on the hypervisor. 
[ 742.944742] env[63538]: DEBUG nova.compute.manager [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 742.945108] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc5eb16-ac62-49b6-a9bd-9eeb0f33291c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.028599] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b85a59-da33-15ae-be56-9a5206be4630, 'name': SearchDatastore_Task, 'duration_secs': 0.012382} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.030542] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.030542] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 736b110e-7265-42cc-9c9b-35f57c466b0c/736b110e-7265-42cc-9c9b-35f57c466b0c.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 743.030542] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3bb310db-bd2e-42f2-a454-6b9517c3c13e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.041798] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Waiting for the task: (returnval){ [ 743.041798] env[63538]: value = "task-5100762" [ 743.041798] env[63538]: _type = "Task" [ 743.041798] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.051133] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100762, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.163456] env[63538]: DEBUG nova.compute.manager [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Received event network-vif-plugged-facecb08-5587-4113-9009-ad339833d9ab {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 743.163658] env[63538]: DEBUG oslo_concurrency.lockutils [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] Acquiring lock "e50e95c0-830b-4d71-999b-546b138bf8f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.164310] env[63538]: DEBUG oslo_concurrency.lockutils [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] Lock "e50e95c0-830b-4d71-999b-546b138bf8f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.164310] env[63538]: DEBUG oslo_concurrency.lockutils [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] Lock "e50e95c0-830b-4d71-999b-546b138bf8f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.165286] env[63538]: DEBUG nova.compute.manager [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] No waiting events found dispatching network-vif-plugged-facecb08-5587-4113-9009-ad339833d9ab {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 743.165286] env[63538]: WARNING nova.compute.manager [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Received unexpected event network-vif-plugged-facecb08-5587-4113-9009-ad339833d9ab for instance with vm_state building and task_state spawning. [ 743.165469] env[63538]: DEBUG nova.compute.manager [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Received event network-changed-facecb08-5587-4113-9009-ad339833d9ab {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 743.166126] env[63538]: DEBUG nova.compute.manager [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Refreshing instance network info cache due to event network-changed-facecb08-5587-4113-9009-ad339833d9ab. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 743.166126] env[63538]: DEBUG oslo_concurrency.lockutils [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] Acquiring lock "refresh_cache-e50e95c0-830b-4d71-999b-546b138bf8f4" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.166369] env[63538]: DEBUG oslo_concurrency.lockutils [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] Acquired lock "refresh_cache-e50e95c0-830b-4d71-999b-546b138bf8f4" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.166596] env[63538]: DEBUG nova.network.neutron [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Refreshing network info cache for port facecb08-5587-4113-9009-ad339833d9ab {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 743.229213] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.410196] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100761, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.438143] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.762s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.438650] env[63538]: DEBUG nova.compute.manager [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 743.442160] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.545s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.442501] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.445024] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.592s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.445837] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.448054] env[63538]: DEBUG oslo_concurrency.lockutils [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.195s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.448054] env[63538]: DEBUG oslo_concurrency.lockutils [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.451196] env[63538]: DEBUG oslo_concurrency.lockutils [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.288s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.451196] env[63538]: DEBUG oslo_concurrency.lockutils [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.451964] env[63538]: DEBUG 
oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.270s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.455329] env[63538]: INFO nova.compute.claims [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.475538] env[63538]: INFO nova.compute.manager [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Took 61.50 seconds to build instance. [ 743.506543] env[63538]: INFO nova.scheduler.client.report [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Deleted allocations for instance 99de5226-a27c-47c5-90fa-5f0c7204df1c [ 743.506543] env[63538]: INFO nova.scheduler.client.report [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Deleted allocations for instance 36d40b69-fae7-4867-afa1-4befdc96bde0 [ 743.522523] env[63538]: INFO nova.scheduler.client.report [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Deleted allocations for instance 47500aaa-92fc-454c-badd-d6f8a2203083 [ 743.535027] env[63538]: INFO nova.scheduler.client.report [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted allocations for instance 61068d41-5f5d-4ee5-b546-71da13eff93d [ 743.556686] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100762, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.638307] env[63538]: DEBUG nova.compute.manager [req-3fd8264e-9b89-43cd-b715-ee42cb260363 req-53b3d0da-e2f7-4fb2-a235-9ebe5be9d86d service nova] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Received event network-vif-plugged-c2833403-d523-4069-b5a5-778e92138ff9 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 743.638556] env[63538]: DEBUG oslo_concurrency.lockutils [req-3fd8264e-9b89-43cd-b715-ee42cb260363 req-53b3d0da-e2f7-4fb2-a235-9ebe5be9d86d service nova] Acquiring lock "04dc612b-7987-405b-9716-95c4ff3535ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.638838] env[63538]: DEBUG oslo_concurrency.lockutils [req-3fd8264e-9b89-43cd-b715-ee42cb260363 req-53b3d0da-e2f7-4fb2-a235-9ebe5be9d86d service nova] Lock "04dc612b-7987-405b-9716-95c4ff3535ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.639051] env[63538]: DEBUG oslo_concurrency.lockutils [req-3fd8264e-9b89-43cd-b715-ee42cb260363 req-53b3d0da-e2f7-4fb2-a235-9ebe5be9d86d service nova] Lock "04dc612b-7987-405b-9716-95c4ff3535ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.639260] env[63538]: DEBUG nova.compute.manager [req-3fd8264e-9b89-43cd-b715-ee42cb260363 req-53b3d0da-e2f7-4fb2-a235-9ebe5be9d86d service nova] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] No waiting events found dispatching network-vif-plugged-c2833403-d523-4069-b5a5-778e92138ff9 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 743.639441] env[63538]: WARNING nova.compute.manager [req-3fd8264e-9b89-43cd-b715-ee42cb260363 req-53b3d0da-e2f7-4fb2-a235-9ebe5be9d86d service nova] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Received unexpected event network-vif-plugged-c2833403-d523-4069-b5a5-778e92138ff9 for instance with vm_state building and task_state spawning. 
[ 743.673027] env[63538]: DEBUG nova.network.neutron [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Successfully updated port: c2833403-d523-4069-b5a5-778e92138ff9 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 743.772121] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Acquiring lock "080b11d7-a756-45a0-81d5-b5fcc2662ac9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.772121] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Lock "080b11d7-a756-45a0-81d5-b5fcc2662ac9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.772121] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Acquiring lock "080b11d7-a756-45a0-81d5-b5fcc2662ac9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.772121] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Lock "080b11d7-a756-45a0-81d5-b5fcc2662ac9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.772121] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Lock "080b11d7-a756-45a0-81d5-b5fcc2662ac9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.777594] env[63538]: INFO nova.compute.manager [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Terminating instance [ 743.782179] env[63538]: DEBUG nova.compute.manager [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 743.782179] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 743.782879] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce055f2-3ea5-4c59-aee7-de92c4b7f121 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.792635] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 743.792960] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6421a31e-5b13-4c88-a22b-3436386c659c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.805552] env[63538]: DEBUG oslo_vmware.api [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Waiting for the task: (returnval){ [ 743.805552] env[63538]: value = "task-5100763" [ 743.805552] env[63538]: _type = "Task" [ 743.805552] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.817525] env[63538]: DEBUG oslo_vmware.api [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100763, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.911313] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100761, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.961455] env[63538]: DEBUG nova.compute.utils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 743.962925] env[63538]: DEBUG nova.compute.manager [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 743.963154] env[63538]: DEBUG nova.network.neutron [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 743.978764] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9faf95d6-16b0-493f-a042-0a01a5809d4c tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lock "4e89aa25-fb4a-430d-ab87-feff57b73780" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.089s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.021513] env[63538]: DEBUG nova.policy [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '861014e7810d4cf59cfa061acbb8f7eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4efc4733ea894fb7825e52b29ac8b6ba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 744.023786] env[63538]: DEBUG oslo_concurrency.lockutils [None req-194beceb-c314-43e6-b851-d019672e47b4 tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "99de5226-a27c-47c5-90fa-5f0c7204df1c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.980s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.033623] env[63538]: DEBUG oslo_concurrency.lockutils [None req-485c5a4b-a4b8-4d35-99e7-a2be0f6cbbea tempest-ServersTestBootFromVolume-1667094028 tempest-ServersTestBootFromVolume-1667094028-project-member] Lock "47500aaa-92fc-454c-badd-d6f8a2203083" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.043s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.047893] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6bae472e-ce77-4b6b-8807-24b801d0fe8b tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "61068d41-5f5d-4ee5-b546-71da13eff93d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.443s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.057843] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67b7113f-66b5-420c-b016-e30811283a97 tempest-ServersAdminNegativeTestJSON-1169563937 tempest-ServersAdminNegativeTestJSON-1169563937-project-member] Lock "36d40b69-fae7-4867-afa1-4befdc96bde0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.371s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.063020] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100762, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.709523} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.063941] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 736b110e-7265-42cc-9c9b-35f57c466b0c/736b110e-7265-42cc-9c9b-35f57c466b0c.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 744.063941] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 744.064198] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8da7f24e-25f8-41a1-ac66-1a4888dbb22e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.072533] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Waiting for the task: (returnval){ [ 744.072533] env[63538]: value = "task-5100764" [ 744.072533] env[63538]: _type = "Task" [ 744.072533] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.086399] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100764, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.149036] env[63538]: DEBUG nova.network.neutron [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Updated VIF entry in instance network info cache for port facecb08-5587-4113-9009-ad339833d9ab. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 744.149404] env[63538]: DEBUG nova.network.neutron [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Updating instance_info_cache with network_info: [{"id": "facecb08-5587-4113-9009-ad339833d9ab", "address": "fa:16:3e:d8:f0:f8", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfacecb08-55", "ovs_interfaceid": "facecb08-5587-4113-9009-ad339833d9ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.178020] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "refresh_cache-04dc612b-7987-405b-9716-95c4ff3535ec" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.178214] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "refresh_cache-04dc612b-7987-405b-9716-95c4ff3535ec" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.179825] env[63538]: DEBUG nova.network.neutron [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 744.319870] env[63538]: DEBUG oslo_vmware.api [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100763, 'name': PowerOffVM_Task, 'duration_secs': 0.241407} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.320655] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 744.321139] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 744.323480] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67725d7a-968f-4d5d-a3fb-ae0613e36b6f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.358213] env[63538]: DEBUG nova.network.neutron [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Successfully created port: 1d37786b-c74d-41d0-a685-3082d8f007be {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.388921] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 744.389710] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 744.390029] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Deleting the datastore file [datastore1] 080b11d7-a756-45a0-81d5-b5fcc2662ac9 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 744.390954] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67418e63-dd94-4bed-9231-30c429608c06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.403640] env[63538]: DEBUG oslo_vmware.api [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Waiting for the task: (returnval){ [ 744.403640] env[63538]: value = "task-5100766" [ 744.403640] env[63538]: _type = "Task" [ 744.403640] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.410775] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100761, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.416316] env[63538]: DEBUG oslo_vmware.api [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100766, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.468562] env[63538]: DEBUG nova.compute.manager [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 744.485745] env[63538]: DEBUG nova.compute.manager [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 744.587902] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100764, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070735} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.587996] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 744.588865] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f7d1ab-017e-4614-aac7-952ff07ebc27 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.621055] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 736b110e-7265-42cc-9c9b-35f57c466b0c/736b110e-7265-42cc-9c9b-35f57c466b0c.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 744.624779] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f80aa8d2-ea13-4ac0-9510-3dcd2f9d8e61 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.646365] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Waiting for the task: (returnval){ [ 744.646365] env[63538]: value = "task-5100767" [ 744.646365] env[63538]: _type = "Task" [ 744.646365] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.656593] env[63538]: DEBUG oslo_concurrency.lockutils [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] Releasing lock "refresh_cache-e50e95c0-830b-4d71-999b-546b138bf8f4" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.656708] env[63538]: DEBUG nova.compute.manager [req-17b76de3-fa14-4b1f-ad94-e51a422e1f8a req-88885746-5d88-430a-a827-1c99254060df service nova] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Received event network-vif-deleted-42827ce2-838a-4a4a-b29a-40f1d29e7a3e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 744.657108] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100767, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.743951] env[63538]: DEBUG nova.network.neutron [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 744.913249] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100761, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.923481] env[63538]: DEBUG oslo_vmware.api [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Task: {'id': task-5100766, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170733} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.923764] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 744.923954] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 744.924497] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 744.924497] env[63538]: INFO nova.compute.manager [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 744.924668] env[63538]: DEBUG oslo.service.loopingcall [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 744.926029] env[63538]: DEBUG nova.compute.manager [-] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 744.926029] env[63538]: DEBUG nova.network.neutron [-] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 745.022735] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.116738] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e856d0da-758c-4158-aad1-c43d62e72d4e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.125184] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d49530f-67b3-405d-aa79-a16495819ae6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.164718] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932b384a-337e-4021-9fd6-833d2ac235a7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.173930] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100767, 'name': ReconfigVM_Task, 'duration_secs': 0.304765} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.176731] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 736b110e-7265-42cc-9c9b-35f57c466b0c/736b110e-7265-42cc-9c9b-35f57c466b0c.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 745.178325] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce46b68e-a77a-4293-a464-7675969e91a0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.182036] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-debca355-03f5-46bd-9146-c23171d9ef6d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.199885] env[63538]: DEBUG nova.compute.provider_tree [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.203375] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Waiting for the task: (returnval){ [ 745.203375] env[63538]: value = "task-5100768" [ 745.203375] env[63538]: _type = "Task" [ 745.203375] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.213880] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100768, 'name': Rename_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.258042] env[63538]: DEBUG nova.network.neutron [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Updating instance_info_cache with network_info: [{"id": "c2833403-d523-4069-b5a5-778e92138ff9", "address": "fa:16:3e:5f:16:e3", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2833403-d5", "ovs_interfaceid": "c2833403-d523-4069-b5a5-778e92138ff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.294564] env[63538]: DEBUG nova.compute.manager [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 745.295630] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126d15b8-08d0-4826-a6a2-0a7392b6c772 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.409662] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100761, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.479846] env[63538]: DEBUG nova.compute.manager [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 745.508578] env[63538]: DEBUG oslo_concurrency.lockutils [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "a7bb1869-5553-40d8-9c0b-366ccdef5fae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.508578] env[63538]: DEBUG oslo_concurrency.lockutils [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "a7bb1869-5553-40d8-9c0b-366ccdef5fae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.508811] env[63538]: DEBUG oslo_concurrency.lockutils [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "a7bb1869-5553-40d8-9c0b-366ccdef5fae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.508914] env[63538]: DEBUG oslo_concurrency.lockutils [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "a7bb1869-5553-40d8-9c0b-366ccdef5fae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.510357] env[63538]: DEBUG oslo_concurrency.lockutils [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "a7bb1869-5553-40d8-9c0b-366ccdef5fae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.514233] env[63538]: INFO nova.compute.manager [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Terminating instance [ 745.516110] env[63538]: DEBUG nova.compute.manager [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 745.516338] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 745.518030] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38217fd5-ec98-4ccb-915c-2bcee4d9b3ff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.524156] env[63538]: DEBUG nova.virt.hardware [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 745.524156] env[63538]: DEBUG nova.virt.hardware [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 745.524361] env[63538]: DEBUG nova.virt.hardware [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.524438] env[63538]: DEBUG nova.virt.hardware [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 745.524607] env[63538]: DEBUG nova.virt.hardware [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.524764] env[63538]: DEBUG nova.virt.hardware [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 745.524981] env[63538]: DEBUG nova.virt.hardware [None 
req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 745.525167] env[63538]: DEBUG nova.virt.hardware [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 745.525345] env[63538]: DEBUG nova.virt.hardware [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 745.525515] env[63538]: DEBUG nova.virt.hardware [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 745.525699] env[63538]: DEBUG nova.virt.hardware [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 745.527469] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f5c661-81af-4836-8181-21ae570538e7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.539986] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be3c9bcc-d6d6-40b2-9057-fef8ae1426bd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.545676] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 745.546458] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-992ec884-4032-4a4a-bb93-ac939d1c1456 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.563120] env[63538]: DEBUG oslo_vmware.api [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 745.563120] env[63538]: value = "task-5100769" [ 745.563120] env[63538]: _type = "Task" [ 745.563120] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.572719] env[63538]: DEBUG oslo_vmware.api [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100769, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.705685] env[63538]: DEBUG nova.scheduler.client.report [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 745.722179] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100768, 'name': Rename_Task, 'duration_secs': 0.160407} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.722844] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 745.723251] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1fe46703-7c11-4935-beae-f944f77e4314 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.732279] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Waiting for the task: (returnval){ [ 745.732279] env[63538]: value = "task-5100770" [ 745.732279] env[63538]: _type = "Task" [ 745.732279] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.742796] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100770, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.763921] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "refresh_cache-04dc612b-7987-405b-9716-95c4ff3535ec" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.763921] env[63538]: DEBUG nova.compute.manager [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Instance network_info: |[{"id": "c2833403-d523-4069-b5a5-778e92138ff9", "address": "fa:16:3e:5f:16:e3", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2833403-d5", "ovs_interfaceid": "c2833403-d523-4069-b5a5-778e92138ff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 745.763921] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:16:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f972c061-0cd5-4aed-8cfb-42cc4a08835a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2833403-d523-4069-b5a5-778e92138ff9', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 745.772733] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Creating folder: Project (422f50dc66ec48b7b262643390072f3d). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 745.773322] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39b20d6b-f939-4a49-8877-725a36855fa6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.788097] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Created folder: Project (422f50dc66ec48b7b262643390072f3d) in parent group-v992234. 
[ 745.788358] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Creating folder: Instances. Parent ref: group-v992347. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 745.788765] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f797ebf-fe42-464b-b4fb-443df84afd3a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.802550] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Created folder: Instances in parent group-v992347. [ 745.802964] env[63538]: DEBUG oslo.service.loopingcall [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 745.803260] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 745.803584] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8b110e3-2edc-42aa-babf-810de374a364 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.821180] env[63538]: INFO nova.compute.manager [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] instance snapshotting [ 745.825351] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04008332-a29d-4e5d-9b9e-dcbc5593a78f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.832903] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 745.832903] env[63538]: value = "task-5100773" [ 745.832903] env[63538]: _type = "Task" [ 745.832903] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.858277] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10779394-3dd0-4dad-b9ee-4da11cd94f1a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.864994] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100773, 'name': CreateVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.915192] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100761, 'name': CreateVM_Task, 'duration_secs': 2.518719} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.915192] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 745.915965] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.916202] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.916628] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 745.916940] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30e622ed-8a7f-48dd-a357-585e9ed018b6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.923524] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 745.923524] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52419b1d-ef16-f9d2-d9eb-62de44de7980" [ 745.923524] env[63538]: _type = "Task" [ 745.923524] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.934425] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52419b1d-ef16-f9d2-d9eb-62de44de7980, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.077224] env[63538]: DEBUG oslo_vmware.api [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100769, 'name': PowerOffVM_Task, 'duration_secs': 0.239644} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.081064] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 746.081064] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 746.081064] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-165507ab-bc7b-43eb-8e1c-424aebd3f264 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.145186] env[63538]: DEBUG nova.network.neutron [-] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.161586] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 746.161878] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 746.162174] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Deleting the datastore file [datastore1] a7bb1869-5553-40d8-9c0b-366ccdef5fae {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 746.166270] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03e00e44-e393-4083-bde7-3bce939e1533 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.180034] env[63538]: DEBUG oslo_vmware.api [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 746.180034] env[63538]: value = "task-5100775" [ 746.180034] env[63538]: _type = "Task" [ 746.180034] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.195320] env[63538]: DEBUG oslo_vmware.api [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100775, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.219029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.764s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.219029] env[63538]: DEBUG nova.compute.manager [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 746.220771] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.510s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.226210] env[63538]: INFO nova.compute.claims [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 746.248320] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100770, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.350162] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100773, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.377513] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 746.378898] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-77a8145b-1d89-4fc7-aa56-3d94902ec7be {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.391338] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 746.391338] env[63538]: value = "task-5100776" [ 746.391338] env[63538]: _type = "Task" [ 746.391338] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.407178] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100776, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.442559] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52419b1d-ef16-f9d2-d9eb-62de44de7980, 'name': SearchDatastore_Task, 'duration_secs': 0.013839} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.443030] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.443379] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 746.443751] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.443992] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.444288] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 746.444771] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85e67979-3554-49b9-be04-a62e44ed6154 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.456761] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 746.457079] env[63538]: DEBUG 
nova.virt.vmwareapi.vmops [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 746.458219] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3510ebdd-e1cd-49ad-91cb-b0276dc60e00 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.468152] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 746.468152] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52dfc746-8e00-218b-cc89-1ce740974ba2" [ 746.468152] env[63538]: _type = "Task" [ 746.468152] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.478016] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52dfc746-8e00-218b-cc89-1ce740974ba2, 'name': SearchDatastore_Task, 'duration_secs': 0.011396} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.478831] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fd52a42-b18f-4428-894b-a23f87afb942 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.485881] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 746.485881] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525afadb-e617-5d05-b54a-3c44d1c75889" [ 746.485881] env[63538]: _type = "Task" [ 746.485881] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.499142] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525afadb-e617-5d05-b54a-3c44d1c75889, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.502669] env[63538]: DEBUG nova.network.neutron [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Successfully updated port: 1d37786b-c74d-41d0-a685-3082d8f007be {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 746.630205] env[63538]: DEBUG nova.compute.manager [req-c1c95a3b-73f0-4849-95c1-e7ace6e63ce6 req-fcd76fb6-6e3a-40c2-8467-af8c92af2762 service nova] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Received event network-vif-deleted-8dcd3b3f-b4cf-4491-b430-9ef54588c908 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 746.641490] env[63538]: DEBUG nova.compute.manager [req-5bc1b704-7653-4648-b0a2-cdab306dbd7d req-31c71036-6346-442b-a555-4ccf919f03b2 service nova] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Received event network-changed-c2833403-d523-4069-b5a5-778e92138ff9 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 746.641708] env[63538]: DEBUG nova.compute.manager [req-5bc1b704-7653-4648-b0a2-cdab306dbd7d req-31c71036-6346-442b-a555-4ccf919f03b2 service nova] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Refreshing instance network info cache due to event network-changed-c2833403-d523-4069-b5a5-778e92138ff9. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 746.641967] env[63538]: DEBUG oslo_concurrency.lockutils [req-5bc1b704-7653-4648-b0a2-cdab306dbd7d req-31c71036-6346-442b-a555-4ccf919f03b2 service nova] Acquiring lock "refresh_cache-04dc612b-7987-405b-9716-95c4ff3535ec" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.642136] env[63538]: DEBUG oslo_concurrency.lockutils [req-5bc1b704-7653-4648-b0a2-cdab306dbd7d req-31c71036-6346-442b-a555-4ccf919f03b2 service nova] Acquired lock "refresh_cache-04dc612b-7987-405b-9716-95c4ff3535ec" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.642306] env[63538]: DEBUG nova.network.neutron [req-5bc1b704-7653-4648-b0a2-cdab306dbd7d req-31c71036-6346-442b-a555-4ccf919f03b2 service nova] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Refreshing network info cache for port c2833403-d523-4069-b5a5-778e92138ff9 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 746.646725] env[63538]: INFO nova.compute.manager [-] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Took 1.72 seconds to deallocate network for instance. [ 746.690438] env[63538]: DEBUG oslo_vmware.api [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100775, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169743} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.690722] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 746.690940] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 746.691137] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 746.691341] env[63538]: INFO nova.compute.manager [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Took 1.18 seconds to destroy the instance on the hypervisor. [ 746.691635] env[63538]: DEBUG oslo.service.loopingcall [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 746.691816] env[63538]: DEBUG nova.compute.manager [-] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 746.691915] env[63538]: DEBUG nova.network.neutron [-] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 746.728630] env[63538]: DEBUG nova.compute.utils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 746.732017] env[63538]: DEBUG nova.compute.manager [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 746.732530] env[63538]: DEBUG nova.network.neutron [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 746.744303] env[63538]: DEBUG oslo_vmware.api [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100770, 'name': PowerOnVM_Task, 'duration_secs': 0.536194} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.745675] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 746.745808] env[63538]: INFO nova.compute.manager [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Took 9.47 seconds to spawn the instance on the hypervisor. [ 746.745898] env[63538]: DEBUG nova.compute.manager [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 746.746740] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e419ea41-a196-4da2-8187-fb959cb04bca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.801936] env[63538]: DEBUG nova.policy [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a5b18ded69f40bab03d546142bc4517', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6480e3bc216427d939223b9e3b6a21b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 746.850922] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100773, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.903341] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100776, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.997891] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525afadb-e617-5d05-b54a-3c44d1c75889, 'name': SearchDatastore_Task, 'duration_secs': 0.010161} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.998318] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.998435] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] e50e95c0-830b-4d71-999b-546b138bf8f4/e50e95c0-830b-4d71-999b-546b138bf8f4.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 746.998700] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-deb0733b-d418-4b2a-825a-5fa66de59328 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.006610] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "refresh_cache-db5993ce-6982-4b82-8f5d-3fe51df8896b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.006947] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquired lock "refresh_cache-db5993ce-6982-4b82-8f5d-3fe51df8896b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.006947] env[63538]: DEBUG nova.network.neutron [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 747.008125] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 747.008125] env[63538]: value = "task-5100777" [ 747.008125] env[63538]: _type = "Task" [ 747.008125] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.019427] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100777, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.156609] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.189162] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "79f4cdd9-219a-4440-9dd2-9b2a360965b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.189544] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "79f4cdd9-219a-4440-9dd2-9b2a360965b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.237227] env[63538]: DEBUG nova.compute.utils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 747.268641] env[63538]: INFO nova.compute.manager [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Took 63.56 seconds to build instance. [ 747.359053] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100773, 'name': CreateVM_Task, 'duration_secs': 1.484769} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.359053] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 747.360449] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.360449] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.360449] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 747.360992] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce28077e-9c7e-4071-b7d8-839f1a410501 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.369890] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 747.369890] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528f1b3f-b2b0-c2a1-6a31-b7a681c28bb2" [ 747.369890] env[63538]: _type = "Task" [ 747.369890] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.385862] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528f1b3f-b2b0-c2a1-6a31-b7a681c28bb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.406352] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100776, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.536804] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100777, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.622551] env[63538]: DEBUG nova.network.neutron [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 747.714170] env[63538]: DEBUG nova.network.neutron [-] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.730262] env[63538]: DEBUG nova.network.neutron [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Successfully created port: 6d45b11c-cd3e-4589-9931-5ffdbbc4e193 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.745534] env[63538]: DEBUG nova.compute.manager [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 747.770527] env[63538]: DEBUG nova.network.neutron [req-5bc1b704-7653-4648-b0a2-cdab306dbd7d req-31c71036-6346-442b-a555-4ccf919f03b2 service nova] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Updated VIF entry in instance network info cache for port c2833403-d523-4069-b5a5-778e92138ff9. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 747.770974] env[63538]: DEBUG nova.network.neutron [req-5bc1b704-7653-4648-b0a2-cdab306dbd7d req-31c71036-6346-442b-a555-4ccf919f03b2 service nova] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Updating instance_info_cache with network_info: [{"id": "c2833403-d523-4069-b5a5-778e92138ff9", "address": "fa:16:3e:5f:16:e3", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2833403-d5", "ovs_interfaceid": "c2833403-d523-4069-b5a5-778e92138ff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.772330] env[63538]: DEBUG oslo_concurrency.lockutils [None req-808e21d7-732a-4472-93a5-fd8d2d1e99dd tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Lock "736b110e-7265-42cc-9c9b-35f57c466b0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.030s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.888594] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528f1b3f-b2b0-c2a1-6a31-b7a681c28bb2, 'name': SearchDatastore_Task, 'duration_secs': 0.062098} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.891930] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.892194] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 747.892429] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.892578] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.892760] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 747.893261] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8736b1a-f5df-4c2f-978a-8e197d653981 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.905476] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100776, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.906863] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 747.906995] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 747.907792] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a71b09a-7ace-4287-802f-d66a8143a2c7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.914014] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec64d88-6117-4064-a351-9866f5767098 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.917820] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 747.917820] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5243c3c7-2863-60a0-c513-e41f75d4fcb0" [ 747.917820] env[63538]: _type = "Task" [ 747.917820] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.925094] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a61e0f-dfb3-4a5e-84e1-0898acf92422 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.935667] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5243c3c7-2863-60a0-c513-e41f75d4fcb0, 'name': SearchDatastore_Task, 'duration_secs': 0.010239} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.936880] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-717a10d8-5664-4096-b10f-a8d9a1131d6c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.966621] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30758200-ba42-4879-8a89-4a6c615dc5d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.970609] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 747.970609] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52546673-9f4a-d9b5-98e3-4ecd60605f5f" [ 747.970609] env[63538]: _type = "Task" [ 747.970609] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.977968] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edd378b-8a59-4941-993f-b52418352382 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.985631] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52546673-9f4a-d9b5-98e3-4ecd60605f5f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.995780] env[63538]: DEBUG nova.compute.provider_tree [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.024457] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100777, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649365} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.024457] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] e50e95c0-830b-4d71-999b-546b138bf8f4/e50e95c0-830b-4d71-999b-546b138bf8f4.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 748.025820] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 748.025820] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-047f5c2b-e0fd-402c-847c-d1e037758c55 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.035036] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 748.035036] env[63538]: value = "task-5100778" [ 748.035036] env[63538]: _type = "Task" [ 748.035036] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.047608] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100778, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.222752] env[63538]: INFO nova.compute.manager [-] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Took 1.53 seconds to deallocate network for instance. 
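Most of the entries in this stretch are the vCenter task-polling loop: wait_for_task (oslo_vmware/api.py:397) logs the task it is about to wait on, _poll_task logs "progress is N%." (api.py:434) on each poll, and logs the task with its duration_secs as "completed successfully." (api.py:444) once it reaches a terminal state. Below is a simplified, stdlib-only sketch of that loop, assuming a get_task_info callable that returns the task's name, state and progress; the real implementation polls through the authenticated VMware API session on a fixed interval rather than an open-coded sleep loop.

import time

# Simplified, stdlib-only sketch of the polling behaviour behind the repeated
# "Task: {...} progress is N%." and "... completed successfully." entries.
# get_task_info is a stand-in for fetching the vCenter TaskInfo for task_id.

def wait_for_task(task_id, get_task_info, interval=0.5, timeout=300.0):
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)  # expected keys: name, state, progress
        if info["state"] in ("queued", "running"):
            print(f"Task: {{'id': {task_id!r}, 'name': {info['name']!r}}} "
                  f"progress is {info.get('progress', 0)}%.")
        elif info["state"] == "success":
            duration = time.monotonic() - start
            print(f"Task: {{'id': {task_id!r}, 'name': {info['name']!r}, "
                  f"'duration_secs': {duration:.6f}}} completed successfully.")
            return info
        else:
            # An 'error' or other terminal state aborts the wait.
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task_id} did not finish within {timeout}s")
        time.sleep(interval)  # the real loop also re-polls on a fixed interval

The CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task and SearchDatastore_Task entries above all pass through this same loop, which is why the api.py:434 and api.py:444 call sites repeat throughout the log.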
[ 748.274218] env[63538]: DEBUG oslo_concurrency.lockutils [req-5bc1b704-7653-4648-b0a2-cdab306dbd7d req-31c71036-6346-442b-a555-4ccf919f03b2 service nova] Releasing lock "refresh_cache-04dc612b-7987-405b-9716-95c4ff3535ec" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.280915] env[63538]: DEBUG nova.compute.manager [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 748.349036] env[63538]: DEBUG nova.network.neutron [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Updating instance_info_cache with network_info: [{"id": "1d37786b-c74d-41d0-a685-3082d8f007be", "address": "fa:16:3e:3c:ea:7e", "network": {"id": "19a71225-10fa-49f2-ac67-af1873417755", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1401406561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4efc4733ea894fb7825e52b29ac8b6ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d37786b-c7", "ovs_interfaceid": "1d37786b-c74d-41d0-a685-3082d8f007be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.406660] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100776, 'name': CreateSnapshot_Task, 'duration_secs': 1.68512} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.406937] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 748.407726] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e352d5-95b8-4a45-a93d-117c12c55b32 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.483278] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52546673-9f4a-d9b5-98e3-4ecd60605f5f, 'name': SearchDatastore_Task, 'duration_secs': 0.014751} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.483609] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.483886] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 04dc612b-7987-405b-9716-95c4ff3535ec/04dc612b-7987-405b-9716-95c4ff3535ec.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 748.484179] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49ff283c-8bc6-4a57-908b-0387daa30333 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.492547] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 748.492547] env[63538]: value = "task-5100779" [ 748.492547] env[63538]: _type = "Task" [ 748.492547] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.503109] env[63538]: DEBUG nova.scheduler.client.report [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 748.506528] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100779, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.547175] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100778, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.26639} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.547390] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 748.548307] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31f7415-13bd-489c-8f3a-a17712abbc82 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.574639] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] e50e95c0-830b-4d71-999b-546b138bf8f4/e50e95c0-830b-4d71-999b-546b138bf8f4.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 748.575007] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ce0894d-a5a2-4ff1-aa1b-99922dfdedeb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.597083] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 748.597083] env[63538]: value = "task-5100780" [ 748.597083] env[63538]: _type = "Task" [ 748.597083] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.606613] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100780, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.731889] env[63538]: DEBUG oslo_concurrency.lockutils [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.761173] env[63538]: DEBUG nova.compute.manager [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 748.816091] env[63538]: DEBUG nova.virt.hardware [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:50:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1936118095',id=29,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1521010580',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 748.816091] env[63538]: DEBUG nova.virt.hardware [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 748.816859] env[63538]: DEBUG nova.virt.hardware [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.817978] env[63538]: DEBUG nova.virt.hardware [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 748.818077] env[63538]: DEBUG nova.virt.hardware [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 
tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.818317] env[63538]: DEBUG nova.virt.hardware [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 748.818468] env[63538]: DEBUG nova.virt.hardware [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 748.818611] env[63538]: DEBUG nova.virt.hardware [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 748.819415] env[63538]: DEBUG nova.virt.hardware [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 748.819754] env[63538]: DEBUG nova.virt.hardware [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 748.819971] env[63538]: DEBUG nova.virt.hardware [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 748.820902] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a965569d-8356-44d9-9af7-2e04feea6825 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.830083] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.835693] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287e4529-3289-435e-81bb-41f76d889d8d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.857881] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 
tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Releasing lock "refresh_cache-db5993ce-6982-4b82-8f5d-3fe51df8896b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.858262] env[63538]: DEBUG nova.compute.manager [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Instance network_info: |[{"id": "1d37786b-c74d-41d0-a685-3082d8f007be", "address": "fa:16:3e:3c:ea:7e", "network": {"id": "19a71225-10fa-49f2-ac67-af1873417755", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1401406561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4efc4733ea894fb7825e52b29ac8b6ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d37786b-c7", "ovs_interfaceid": "1d37786b-c74d-41d0-a685-3082d8f007be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 748.858997] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:ea:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d39252e-42ef-4252-98d3-62af5a0d109d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d37786b-c74d-41d0-a685-3082d8f007be', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.870405] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Creating folder: Project (4efc4733ea894fb7825e52b29ac8b6ba). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 748.871043] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65cf382f-127b-4cb6-aa63-cedb28fbd753 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.886828] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Created folder: Project (4efc4733ea894fb7825e52b29ac8b6ba) in parent group-v992234. 
[ 748.887084] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Creating folder: Instances. Parent ref: group-v992351. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 748.887386] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccf28234-ccdd-4ff3-bf84-d04e65e7126a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.902391] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Created folder: Instances in parent group-v992351. [ 748.902866] env[63538]: DEBUG oslo.service.loopingcall [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 748.903082] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 748.903529] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e01fe21d-cd1d-4a50-9b9f-d4174d1814ad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.929584] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 748.931604] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b4b0ccee-d5e7-4c05-bab9-7256ee163f8d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.935953] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 748.935953] env[63538]: value = "task-5100783" [ 748.935953] env[63538]: _type = "Task" [ 748.935953] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.943231] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 748.943231] env[63538]: value = "task-5100784" [ 748.943231] env[63538]: _type = "Task" [ 748.943231] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.951760] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100783, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.960311] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100784, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.006094] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100779, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.008194] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.787s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.008751] env[63538]: DEBUG nova.compute.manager [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 749.011887] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.071s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.015054] env[63538]: DEBUG nova.objects.instance [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lazy-loading 'resources' on Instance uuid 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 749.112733] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100780, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.358556] env[63538]: DEBUG nova.compute.manager [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Received event network-vif-plugged-1d37786b-c74d-41d0-a685-3082d8f007be {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 749.358799] env[63538]: DEBUG oslo_concurrency.lockutils [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] Acquiring lock "db5993ce-6982-4b82-8f5d-3fe51df8896b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.359660] env[63538]: DEBUG oslo_concurrency.lockutils [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] Lock "db5993ce-6982-4b82-8f5d-3fe51df8896b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.360136] env[63538]: DEBUG oslo_concurrency.lockutils [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] Lock "db5993ce-6982-4b82-8f5d-3fe51df8896b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.360288] env[63538]: DEBUG nova.compute.manager [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] No waiting events found dispatching network-vif-plugged-1d37786b-c74d-41d0-a685-3082d8f007be {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 749.360380] env[63538]: WARNING nova.compute.manager [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Received unexpected event network-vif-plugged-1d37786b-c74d-41d0-a685-3082d8f007be for instance with vm_state building and task_state spawning. [ 749.360550] env[63538]: DEBUG nova.compute.manager [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Received event network-changed-1d37786b-c74d-41d0-a685-3082d8f007be {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 749.360714] env[63538]: DEBUG nova.compute.manager [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Refreshing instance network info cache due to event network-changed-1d37786b-c74d-41d0-a685-3082d8f007be. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 749.360915] env[63538]: DEBUG oslo_concurrency.lockutils [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] Acquiring lock "refresh_cache-db5993ce-6982-4b82-8f5d-3fe51df8896b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.361069] env[63538]: DEBUG oslo_concurrency.lockutils [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] Acquired lock "refresh_cache-db5993ce-6982-4b82-8f5d-3fe51df8896b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.361235] env[63538]: DEBUG nova.network.neutron [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Refreshing network info cache for port 1d37786b-c74d-41d0-a685-3082d8f007be {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 749.454305] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100783, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.462754] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100784, 'name': CloneVM_Task} progress is 93%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.505502] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100779, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529101} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.505867] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 04dc612b-7987-405b-9716-95c4ff3535ec/04dc612b-7987-405b-9716-95c4ff3535ec.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 749.506246] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 749.506592] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-74a51e8f-cdf2-4c29-9e6f-4cd274ab35c9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.518919] env[63538]: DEBUG nova.compute.utils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 749.522383] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 749.522383] env[63538]: value = "task-5100785" [ 749.522383] env[63538]: _type = "Task" [ 749.522383] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.523759] env[63538]: DEBUG nova.compute.manager [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 749.523759] env[63538]: DEBUG nova.network.neutron [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 749.539194] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100785, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.607546] env[63538]: DEBUG nova.policy [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16fdc041f4c74e0ea76ee8984f9786f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a701618902d411b8af203fdbb1069be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 749.613201] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100780, 'name': ReconfigVM_Task, 'duration_secs': 0.689026} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.617031] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Reconfigured VM instance instance-0000002b to attach disk [datastore1] e50e95c0-830b-4d71-999b-546b138bf8f4/e50e95c0-830b-4d71-999b-546b138bf8f4.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 749.617148] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1257d930-9cf4-4a08-b195-04140e8626e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.628628] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 749.628628] env[63538]: value = "task-5100786" [ 749.628628] env[63538]: _type = "Task" [ 749.628628] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.640889] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100786, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.726191] env[63538]: DEBUG nova.network.neutron [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Successfully updated port: 6d45b11c-cd3e-4589-9931-5ffdbbc4e193 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 749.951930] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100783, 'name': CreateVM_Task, 'duration_secs': 0.716409} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.951930] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 749.952518] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.952518] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.952859] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 749.956033] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f517099-4ce6-49ef-93a9-740a9b3b9eca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.959825] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100784, 'name': CloneVM_Task} progress is 93%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.963717] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 749.963717] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5240118f-49d3-3473-8f77-56dfc0604b2f" [ 749.963717] env[63538]: _type = "Task" [ 749.963717] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.979954] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5240118f-49d3-3473-8f77-56dfc0604b2f, 'name': SearchDatastore_Task, 'duration_secs': 0.010749} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.979954] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.979954] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.979954] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.979954] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.979954] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 749.979954] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e848a42-2082-4dc5-a13a-7b0bd0540c47 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.989858] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 749.989858] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 749.990446] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9e27986-92d1-4893-a6e5-57aab7f18c68 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.004375] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 750.004375] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524868ef-db33-807d-ff9c-3183031f1fc3" [ 750.004375] env[63538]: _type = "Task" [ 750.004375] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.015690] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524868ef-db33-807d-ff9c-3183031f1fc3, 'name': SearchDatastore_Task, 'duration_secs': 0.010784} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.016612] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f85ae492-4881-4b56-bf07-6ce08a38b485 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.023369] env[63538]: DEBUG nova.compute.manager [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 750.038882] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 750.038882] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522817cc-729f-27c0-c4ae-27c5b558c553" [ 750.038882] env[63538]: _type = "Task" [ 750.038882] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.056499] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100785, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094508} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.057289] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 750.063453] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74f726c-9741-47a0-a2e8-e9ded677b470 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.079315] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522817cc-729f-27c0-c4ae-27c5b558c553, 'name': SearchDatastore_Task, 'duration_secs': 0.010688} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.080802] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.081108] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] db5993ce-6982-4b82-8f5d-3fe51df8896b/db5993ce-6982-4b82-8f5d-3fe51df8896b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 750.081691] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c1824c7-42d5-4a35-9b0c-1387c995eacf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.113291] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 04dc612b-7987-405b-9716-95c4ff3535ec/04dc612b-7987-405b-9716-95c4ff3535ec.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 750.118322] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ec0ddf4-7425-4e24-be6b-844aa8fbf02e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.139030] env[63538]: DEBUG nova.network.neutron [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Successfully created port: 
2e0047f2-712d-4e63-b423-df4605d54382 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 750.142614] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "87f8bb3e-6f32-4850-ac54-efad0befb268" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.142811] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "87f8bb3e-6f32-4850-ac54-efad0befb268" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.143110] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 750.143110] env[63538]: value = "task-5100787" [ 750.143110] env[63538]: _type = "Task" [ 750.143110] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.144399] env[63538]: DEBUG nova.compute.manager [req-a26861b3-6e72-44a2-b985-7510751dad23 req-b44482fb-a656-4e0e-859a-81c107d8316d service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Received event network-vif-plugged-6d45b11c-cd3e-4589-9931-5ffdbbc4e193 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 750.144608] env[63538]: DEBUG oslo_concurrency.lockutils [req-a26861b3-6e72-44a2-b985-7510751dad23 req-b44482fb-a656-4e0e-859a-81c107d8316d service nova] Acquiring lock "5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.144815] env[63538]: DEBUG oslo_concurrency.lockutils [req-a26861b3-6e72-44a2-b985-7510751dad23 req-b44482fb-a656-4e0e-859a-81c107d8316d service nova] Lock "5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.144995] env[63538]: DEBUG oslo_concurrency.lockutils [req-a26861b3-6e72-44a2-b985-7510751dad23 req-b44482fb-a656-4e0e-859a-81c107d8316d service nova] Lock "5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.145177] env[63538]: DEBUG nova.compute.manager [req-a26861b3-6e72-44a2-b985-7510751dad23 req-b44482fb-a656-4e0e-859a-81c107d8316d service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] No waiting events found dispatching network-vif-plugged-6d45b11c-cd3e-4589-9931-5ffdbbc4e193 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 750.145348] env[63538]: WARNING 
nova.compute.manager [req-a26861b3-6e72-44a2-b985-7510751dad23 req-b44482fb-a656-4e0e-859a-81c107d8316d service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Received unexpected event network-vif-plugged-6d45b11c-cd3e-4589-9931-5ffdbbc4e193 for instance with vm_state building and task_state spawning. [ 750.158209] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 750.158209] env[63538]: value = "task-5100788" [ 750.158209] env[63538]: _type = "Task" [ 750.158209] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.160389] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100786, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.168990] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100787, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.177307] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100788, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.231053] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "refresh_cache-5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.231253] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquired lock "refresh_cache-5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.231479] env[63538]: DEBUG nova.network.neutron [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 750.329595] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2129fa60-b39a-445e-b662-6f98a16c360e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.342254] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8282b5e8-e9a5-4e56-9605-fa6c4d1805ce {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.381705] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f59d98-96e0-4fab-aaf6-33a628f1d078 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.392761] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3640aa10-9a81-478a-b510-2f916d450340 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.397772] env[63538]: DEBUG nova.network.neutron [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Updated VIF entry in instance network info cache for port 1d37786b-c74d-41d0-a685-3082d8f007be. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 750.398159] env[63538]: DEBUG nova.network.neutron [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Updating instance_info_cache with network_info: [{"id": "1d37786b-c74d-41d0-a685-3082d8f007be", "address": "fa:16:3e:3c:ea:7e", "network": {"id": "19a71225-10fa-49f2-ac67-af1873417755", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1401406561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4efc4733ea894fb7825e52b29ac8b6ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d37786b-c7", "ovs_interfaceid": "1d37786b-c74d-41d0-a685-3082d8f007be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.411836] env[63538]: DEBUG nova.compute.provider_tree [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.459794] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100784, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.653613] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100786, 'name': Rename_Task, 'duration_secs': 0.662326} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.657043] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 750.657358] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a0f7f9f-9d25-457f-b129-cb9d2c13b110 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.668983] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100787, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513954} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.668983] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 750.668983] env[63538]: value = "task-5100789" [ 750.668983] env[63538]: _type = "Task" [ 750.668983] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.669537] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] db5993ce-6982-4b82-8f5d-3fe51df8896b/db5993ce-6982-4b82-8f5d-3fe51df8896b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 750.669766] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 750.670112] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a5b3ca9-6f84-424d-8774-ad77dbc082be {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.681926] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100788, 'name': ReconfigVM_Task, 'duration_secs': 0.409069} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.681926] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 04dc612b-7987-405b-9716-95c4ff3535ec/04dc612b-7987-405b-9716-95c4ff3535ec.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 750.682176] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1731a50-01c8-43d9-89b5-88bb22290cca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.693035] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100789, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.693035] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 750.693035] env[63538]: value = "task-5100790" [ 750.693035] env[63538]: _type = "Task" [ 750.693035] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.697081] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 750.697081] env[63538]: value = "task-5100791" [ 750.697081] env[63538]: _type = "Task" [ 750.697081] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.706663] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100790, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.711610] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100791, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.798038] env[63538]: DEBUG nova.network.neutron [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 750.901559] env[63538]: DEBUG oslo_concurrency.lockutils [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] Releasing lock "refresh_cache-db5993ce-6982-4b82-8f5d-3fe51df8896b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.901872] env[63538]: DEBUG nova.compute.manager [req-6f55c329-a272-4abf-aa32-f33d86deeaa1 req-c88563c0-547e-4a73-b2c5-1b23eacb9df3 service nova] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Received event network-vif-deleted-e7647c9c-cea3-4b43-ba7e-69aaac6286a8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 750.914946] env[63538]: DEBUG nova.scheduler.client.report [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 750.959233] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100784, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.035496] env[63538]: DEBUG nova.compute.manager [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 751.076238] env[63538]: DEBUG nova.virt.hardware [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 751.076238] env[63538]: DEBUG nova.virt.hardware [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 751.076238] env[63538]: DEBUG nova.virt.hardware [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 751.076238] env[63538]: DEBUG nova.virt.hardware [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 751.076238] env[63538]: DEBUG nova.virt.hardware [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 751.076238] env[63538]: DEBUG nova.virt.hardware [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 751.076674] env[63538]: DEBUG nova.virt.hardware [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 751.077055] env[63538]: DEBUG nova.virt.hardware [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 751.077385] 
env[63538]: DEBUG nova.virt.hardware [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 751.078095] env[63538]: DEBUG nova.virt.hardware [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 751.078437] env[63538]: DEBUG nova.virt.hardware [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 751.079842] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf031748-6996-472c-8505-6f0ccd948b68 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.086324] env[63538]: DEBUG nova.network.neutron [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Updating instance_info_cache with network_info: [{"id": "6d45b11c-cd3e-4589-9931-5ffdbbc4e193", "address": "fa:16:3e:cd:d2:4d", "network": {"id": "c0e827ce-4abc-4178-9811-36625c1d6ebc", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1894083828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6480e3bc216427d939223b9e3b6a21b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69351262-8d39-441a-85ba-3a78df436d17", "external-id": "nsx-vlan-transportzone-205", "segmentation_id": 205, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d45b11c-cd", "ovs_interfaceid": "6d45b11c-cd3e-4589-9931-5ffdbbc4e193", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.097984] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9cec8d-ec35-40f9-b6c8-d0653d658626 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.184591] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100789, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.207029] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100790, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070454} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.207816] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 751.208664] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81dc0c27-2286-44c1-8797-76c45579a9be {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.215889] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100791, 'name': Rename_Task, 'duration_secs': 0.214484} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.216751] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 751.217054] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b44f4ad7-e6ec-43e9-bd32-a196b9453cad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.237830] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] db5993ce-6982-4b82-8f5d-3fe51df8896b/db5993ce-6982-4b82-8f5d-3fe51df8896b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 751.238533] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9be94d3d-4cc0-4926-9fcf-e10f73aff2c3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.259069] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 751.259069] env[63538]: value = "task-5100792" [ 751.259069] env[63538]: _type = "Task" [ 751.259069] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.269129] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 751.269129] env[63538]: value = "task-5100793" [ 751.269129] env[63538]: _type = "Task" [ 751.269129] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.277727] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100792, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.284964] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100793, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.421256] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.409s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.424134] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.936s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.426057] env[63538]: INFO nova.compute.claims [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 751.441949] env[63538]: INFO nova.scheduler.client.report [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleted allocations for instance 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7 [ 751.467629] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100784, 'name': CloneVM_Task, 'duration_secs': 2.377512} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.467805] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Created linked-clone VM from snapshot [ 751.471059] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb65a6d2-cf32-4d55-8b90-cd6b399fec10 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.483663] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Uploading image 29f321f7-1ca8-4699-a767-09b4915b3bd0 {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 751.519228] env[63538]: DEBUG oslo_vmware.rw_handles [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 751.519228] env[63538]: value = "vm-992354" [ 751.519228] env[63538]: _type = "VirtualMachine" [ 751.519228] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 751.519751] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-08ef009b-1c8f-4fa2-b48d-34b59a8c0008 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.530094] env[63538]: DEBUG oslo_vmware.rw_handles [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lease: (returnval){ [ 751.530094] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52108ca4-2e91-8e6a-1083-b6861fd3e83e" [ 751.530094] env[63538]: _type = "HttpNfcLease" [ 751.530094] env[63538]: } obtained for exporting VM: (result){ [ 751.530094] env[63538]: value = "vm-992354" [ 751.530094] env[63538]: _type = "VirtualMachine" [ 751.530094] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 751.530534] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the lease: (returnval){ [ 751.530534] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52108ca4-2e91-8e6a-1083-b6861fd3e83e" [ 751.530534] env[63538]: _type = "HttpNfcLease" [ 751.530534] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 751.538707] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 751.538707] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52108ca4-2e91-8e6a-1083-b6861fd3e83e" [ 751.538707] env[63538]: _type = "HttpNfcLease" [ 751.538707] env[63538]: } is initializing. 
{{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 751.594692] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Releasing lock "refresh_cache-5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.595354] env[63538]: DEBUG nova.compute.manager [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Instance network_info: |[{"id": "6d45b11c-cd3e-4589-9931-5ffdbbc4e193", "address": "fa:16:3e:cd:d2:4d", "network": {"id": "c0e827ce-4abc-4178-9811-36625c1d6ebc", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1894083828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6480e3bc216427d939223b9e3b6a21b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69351262-8d39-441a-85ba-3a78df436d17", "external-id": "nsx-vlan-transportzone-205", "segmentation_id": 205, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d45b11c-cd", "ovs_interfaceid": "6d45b11c-cd3e-4589-9931-5ffdbbc4e193", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 751.595707] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:d2:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69351262-8d39-441a-85ba-3a78df436d17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d45b11c-cd3e-4589-9931-5ffdbbc4e193', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.604090] env[63538]: DEBUG oslo.service.loopingcall [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.604480] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 751.604845] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8013aa99-c742-4d62-8a6b-ba79a0398eec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.630076] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.630076] env[63538]: value = "task-5100795" [ 751.630076] env[63538]: _type = "Task" [ 751.630076] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.639627] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100795, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.682176] env[63538]: DEBUG oslo_vmware.api [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100789, 'name': PowerOnVM_Task, 'duration_secs': 0.930498} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.683016] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 751.683016] env[63538]: INFO nova.compute.manager [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Took 11.75 seconds to spawn the instance on the hypervisor. [ 751.683016] env[63538]: DEBUG nova.compute.manager [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 751.684185] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92d1d45-e606-4e1f-ba37-e7c1fc33d982 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.713334] env[63538]: DEBUG nova.compute.manager [req-957fe273-9348-492a-9304-28464c350e6b req-8d0d098b-dc6a-486a-b7b3-dbcba280cf5d service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Received event network-changed-27cdea75-ceda-4390-8313-cfbd1681ebd2 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 751.713600] env[63538]: DEBUG nova.compute.manager [req-957fe273-9348-492a-9304-28464c350e6b req-8d0d098b-dc6a-486a-b7b3-dbcba280cf5d service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Refreshing instance network info cache due to event network-changed-27cdea75-ceda-4390-8313-cfbd1681ebd2. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 751.713855] env[63538]: DEBUG oslo_concurrency.lockutils [req-957fe273-9348-492a-9304-28464c350e6b req-8d0d098b-dc6a-486a-b7b3-dbcba280cf5d service nova] Acquiring lock "refresh_cache-736b110e-7265-42cc-9c9b-35f57c466b0c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.713999] env[63538]: DEBUG oslo_concurrency.lockutils [req-957fe273-9348-492a-9304-28464c350e6b req-8d0d098b-dc6a-486a-b7b3-dbcba280cf5d service nova] Acquired lock "refresh_cache-736b110e-7265-42cc-9c9b-35f57c466b0c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.714185] env[63538]: DEBUG nova.network.neutron [req-957fe273-9348-492a-9304-28464c350e6b req-8d0d098b-dc6a-486a-b7b3-dbcba280cf5d service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Refreshing network info cache for port 27cdea75-ceda-4390-8313-cfbd1681ebd2 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 751.727811] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "de68a921-bf67-4794-923d-4e062d8ff802" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.728067] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "de68a921-bf67-4794-923d-4e062d8ff802" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.770516] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100792, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.780494] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100793, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.954814] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ff40a1e-65aa-4860-8888-7480fa4e3de4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.028s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.043155] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 752.043155] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52108ca4-2e91-8e6a-1083-b6861fd3e83e" [ 752.043155] env[63538]: _type = "HttpNfcLease" [ 752.043155] env[63538]: } is ready. 
{{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 752.044250] env[63538]: DEBUG oslo_vmware.rw_handles [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 752.044250] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52108ca4-2e91-8e6a-1083-b6861fd3e83e" [ 752.044250] env[63538]: _type = "HttpNfcLease" [ 752.044250] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 752.045618] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78270e41-8e1d-45aa-ad57-8c485e7e9eb2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.057168] env[63538]: DEBUG oslo_vmware.rw_handles [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ebe39-e366-15f6-47d1-3f6ffbb0df2f/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 752.057882] env[63538]: DEBUG oslo_vmware.rw_handles [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ebe39-e366-15f6-47d1-3f6ffbb0df2f/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 752.143427] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100795, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.192783] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-46b45bf7-2921-4575-be7a-262d1b160a53 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.209243] env[63538]: INFO nova.compute.manager [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Took 61.94 seconds to build instance. [ 752.271690] env[63538]: DEBUG oslo_vmware.api [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100792, 'name': PowerOnVM_Task, 'duration_secs': 0.783458} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.275791] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 752.276228] env[63538]: INFO nova.compute.manager [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Took 9.57 seconds to spawn the instance on the hypervisor. [ 752.276467] env[63538]: DEBUG nova.compute.manager [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 752.277334] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479fec84-1fb6-4d12-aea8-56fb23fadd00 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.287127] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100793, 'name': ReconfigVM_Task, 'duration_secs': 0.819201} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.288930] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Reconfigured VM instance instance-0000002d to attach disk [datastore1] db5993ce-6982-4b82-8f5d-3fe51df8896b/db5993ce-6982-4b82-8f5d-3fe51df8896b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.297993] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42f381ba-de18-41db-bd1f-64147a93d0b7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.308523] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 752.308523] env[63538]: value = "task-5100796" [ 752.308523] env[63538]: _type = "Task" [ 752.308523] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.319433] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100796, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.430143] env[63538]: DEBUG nova.network.neutron [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Successfully updated port: 2e0047f2-712d-4e63-b423-df4605d54382 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 752.520115] env[63538]: DEBUG nova.compute.manager [req-92214238-0c6b-4549-97b7-8df59f663645 req-fd1543a9-5658-42f6-9afd-9478134d943b service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Received event network-changed-6d45b11c-cd3e-4589-9931-5ffdbbc4e193 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 752.520418] env[63538]: DEBUG nova.compute.manager [req-92214238-0c6b-4549-97b7-8df59f663645 req-fd1543a9-5658-42f6-9afd-9478134d943b service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Refreshing instance network info cache due to event network-changed-6d45b11c-cd3e-4589-9931-5ffdbbc4e193. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 752.520695] env[63538]: DEBUG oslo_concurrency.lockutils [req-92214238-0c6b-4549-97b7-8df59f663645 req-fd1543a9-5658-42f6-9afd-9478134d943b service nova] Acquiring lock "refresh_cache-5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.521507] env[63538]: DEBUG oslo_concurrency.lockutils [req-92214238-0c6b-4549-97b7-8df59f663645 req-fd1543a9-5658-42f6-9afd-9478134d943b service nova] Acquired lock "refresh_cache-5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.521507] env[63538]: DEBUG nova.network.neutron [req-92214238-0c6b-4549-97b7-8df59f663645 req-fd1543a9-5658-42f6-9afd-9478134d943b service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Refreshing network info cache for port 6d45b11c-cd3e-4589-9931-5ffdbbc4e193 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 752.645965] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100795, 'name': CreateVM_Task, 'duration_secs': 0.673165} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.645965] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 752.646718] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.647037] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.648065] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 752.648546] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-787a2fbf-fe6b-4dfc-b4c1-2d7086ffd098 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.661196] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 752.661196] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526be60a-15a1-0064-6536-6398311ba9f4" [ 752.661196] env[63538]: _type = "Task" [ 752.661196] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.671448] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526be60a-15a1-0064-6536-6398311ba9f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.712096] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b7560eab-4f25-422c-831d-91c12bac0abe tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "e50e95c0-830b-4d71-999b-546b138bf8f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.498s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.810057] env[63538]: DEBUG nova.network.neutron [req-957fe273-9348-492a-9304-28464c350e6b req-8d0d098b-dc6a-486a-b7b3-dbcba280cf5d service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Updated VIF entry in instance network info cache for port 27cdea75-ceda-4390-8313-cfbd1681ebd2. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 752.810057] env[63538]: DEBUG nova.network.neutron [req-957fe273-9348-492a-9304-28464c350e6b req-8d0d098b-dc6a-486a-b7b3-dbcba280cf5d service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Updating instance_info_cache with network_info: [{"id": "27cdea75-ceda-4390-8313-cfbd1681ebd2", "address": "fa:16:3e:b7:0c:84", "network": {"id": "4f44aa50-ad3b-4530-bd6b-0adbebed9d1d", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-2084769802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.174", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a75dacc8d26c466bb9bd9e8c5d8acbf5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27cdea75-ce", "ovs_interfaceid": "27cdea75-ceda-4390-8313-cfbd1681ebd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.814632] env[63538]: INFO nova.compute.manager [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Took 58.53 seconds to build instance. [ 752.822257] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100796, 'name': Rename_Task, 'duration_secs': 0.257991} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.823163] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 752.823163] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ded59df2-698a-4ce5-a420-d4015d663087 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.832127] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 752.832127] env[63538]: value = "task-5100797" [ 752.832127] env[63538]: _type = "Task" [ 752.832127] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.844518] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100797, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.934759] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "refresh_cache-707a79e2-f5db-479c-b719-1e040935cda3" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.934759] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "refresh_cache-707a79e2-f5db-479c-b719-1e040935cda3" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.934759] env[63538]: DEBUG nova.network.neutron [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 752.940448] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6bbdfce1-6fcc-438b-8875-a0f444ff6987 tempest-ServersAdminTestJSON-243259323 tempest-ServersAdminTestJSON-243259323-project-admin] Acquiring lock "refresh_cache-e50e95c0-830b-4d71-999b-546b138bf8f4" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.940703] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6bbdfce1-6fcc-438b-8875-a0f444ff6987 tempest-ServersAdminTestJSON-243259323 tempest-ServersAdminTestJSON-243259323-project-admin] Acquired lock "refresh_cache-e50e95c0-830b-4d71-999b-546b138bf8f4" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.943226] env[63538]: DEBUG nova.network.neutron [None 
req-6bbdfce1-6fcc-438b-8875-a0f444ff6987 tempest-ServersAdminTestJSON-243259323 tempest-ServersAdminTestJSON-243259323-project-admin] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 753.099902] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdc09c8-767a-419d-a7c2-ccc8ec8d381d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.116269] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd29be9-5798-481c-968d-1a5dc215f0c3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.152519] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0aea86-de3a-4aaa-988a-cc6cbfe35729 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.159868] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f39d2d-82e2-45f8-b46d-d27ba313f084 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.179191] env[63538]: DEBUG nova.compute.provider_tree [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.184325] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526be60a-15a1-0064-6536-6398311ba9f4, 'name': SearchDatastore_Task, 'duration_secs': 0.013684} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.184972] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.185240] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 753.186236] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.186236] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.186236] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 753.186236] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c71a0d48-a743-432e-9136-75398a8d4cb8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.200839] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 753.200839] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 753.200839] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e2f259a-f99b-4be0-9482-2d4ecbc77eb9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.209514] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 753.209514] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f9ec75-a701-826d-92d7-4b8ecdd0fdbf" [ 753.209514] env[63538]: _type = "Task" [ 753.209514] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.216135] env[63538]: DEBUG nova.compute.manager [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 753.222499] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f9ec75-a701-826d-92d7-4b8ecdd0fdbf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.316079] env[63538]: DEBUG oslo_concurrency.lockutils [req-957fe273-9348-492a-9304-28464c350e6b req-8d0d098b-dc6a-486a-b7b3-dbcba280cf5d service nova] Releasing lock "refresh_cache-736b110e-7265-42cc-9c9b-35f57c466b0c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.316888] env[63538]: DEBUG oslo_concurrency.lockutils [None req-339cb096-80a8-49dc-b7dd-2063925e840e tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "04dc612b-7987-405b-9716-95c4ff3535ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.222s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.344601] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100797, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.484323] env[63538]: DEBUG nova.network.neutron [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 753.686802] env[63538]: DEBUG nova.scheduler.client.report [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 753.722355] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f9ec75-a701-826d-92d7-4b8ecdd0fdbf, 'name': SearchDatastore_Task, 'duration_secs': 0.011548} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.725572] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf6158a6-cb22-41ac-b21b-2ada3f48e22e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.732993] env[63538]: DEBUG nova.network.neutron [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Updating instance_info_cache with network_info: [{"id": "2e0047f2-712d-4e63-b423-df4605d54382", "address": "fa:16:3e:aa:f2:b5", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e0047f2-71", "ovs_interfaceid": "2e0047f2-712d-4e63-b423-df4605d54382", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.742603] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 753.742603] env[63538]: value = 
"session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ecb3a1-491a-2e98-d183-f463e80dd41d" [ 753.742603] env[63538]: _type = "Task" [ 753.742603] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.750302] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.758288] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ecb3a1-491a-2e98-d183-f463e80dd41d, 'name': SearchDatastore_Task, 'duration_secs': 0.012334} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.758584] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.758845] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b/5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 753.759136] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4db2312c-180a-428e-a442-c2e1c39e734d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.769918] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 753.769918] env[63538]: value = "task-5100798" [ 753.769918] env[63538]: _type = "Task" [ 753.769918] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.782838] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100798, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.823868] env[63538]: DEBUG nova.network.neutron [req-92214238-0c6b-4549-97b7-8df59f663645 req-fd1543a9-5658-42f6-9afd-9478134d943b service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Updated VIF entry in instance network info cache for port 6d45b11c-cd3e-4589-9931-5ffdbbc4e193. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 753.823868] env[63538]: DEBUG nova.network.neutron [req-92214238-0c6b-4549-97b7-8df59f663645 req-fd1543a9-5658-42f6-9afd-9478134d943b service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Updating instance_info_cache with network_info: [{"id": "6d45b11c-cd3e-4589-9931-5ffdbbc4e193", "address": "fa:16:3e:cd:d2:4d", "network": {"id": "c0e827ce-4abc-4178-9811-36625c1d6ebc", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1894083828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6480e3bc216427d939223b9e3b6a21b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69351262-8d39-441a-85ba-3a78df436d17", "external-id": "nsx-vlan-transportzone-205", "segmentation_id": 205, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d45b11c-cd", "ovs_interfaceid": "6d45b11c-cd3e-4589-9931-5ffdbbc4e193", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.825336] env[63538]: DEBUG nova.compute.manager [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 753.851950] env[63538]: DEBUG oslo_vmware.api [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100797, 'name': PowerOnVM_Task, 'duration_secs': 0.858167} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.852368] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 753.852815] env[63538]: INFO nova.compute.manager [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Took 8.37 seconds to spawn the instance on the hypervisor. 
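The SearchDatastore_Task, CopyVirtualDisk_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: the driver invokes an asynchronous vCenter method, gets back a Task managed object, and the API session polls it (the recurring "_poll_task ... progress is N%" lines) until it completes. A minimal sketch of that pattern is shown below; it is illustrative only, and the vCenter host, credentials and datastore paths are placeholders, not values taken from this log.

    # Sketch only -- host, credentials and disk paths are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # Asynchronous vCenter calls return a Task managed object reference.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName='[datastore1] cache/image.vmdk',
        destName='[datastore1] instance-uuid/instance-uuid.vmdk')

    # wait_for_task() polls the task until it reaches the "success" state
    # (or raises on failure); that polling is what emits the repeated
    # "Task: {...} progress is N%" DEBUG lines seen throughout this log.
    session.wait_for_task(task)
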
[ 753.853182] env[63538]: DEBUG nova.compute.manager [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 753.854345] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886b8302-4ca4-4bd2-9237-4fde689f2c3f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.194781] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.771s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.195425] env[63538]: DEBUG nova.compute.manager [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 754.199746] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.372s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.202427] env[63538]: INFO nova.compute.claims [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 754.207165] env[63538]: DEBUG nova.network.neutron [None req-6bbdfce1-6fcc-438b-8875-a0f444ff6987 tempest-ServersAdminTestJSON-243259323 tempest-ServersAdminTestJSON-243259323-project-admin] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Updating instance_info_cache with network_info: [{"id": "facecb08-5587-4113-9009-ad339833d9ab", "address": "fa:16:3e:d8:f0:f8", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfacecb08-55", "ovs_interfaceid": "facecb08-5587-4113-9009-ad339833d9ab", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.238731] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "refresh_cache-707a79e2-f5db-479c-b719-1e040935cda3" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.238731] env[63538]: DEBUG nova.compute.manager [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Instance network_info: |[{"id": "2e0047f2-712d-4e63-b423-df4605d54382", "address": "fa:16:3e:aa:f2:b5", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e0047f2-71", "ovs_interfaceid": "2e0047f2-712d-4e63-b423-df4605d54382", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 754.238731] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:f2:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e0047f2-712d-4e63-b423-df4605d54382', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 754.249206] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Creating folder: Project (2a701618902d411b8af203fdbb1069be). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 754.249984] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f8b8d4a-9d81-42fd-9e4c-73ef00da2e06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.266548] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Created folder: Project (2a701618902d411b8af203fdbb1069be) in parent group-v992234. [ 754.266548] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Creating folder: Instances. Parent ref: group-v992356. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 754.266548] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0526c8b-2561-4849-af98-d0913df42b56 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.280872] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Created folder: Instances in parent group-v992356. [ 754.280872] env[63538]: DEBUG oslo.service.loopingcall [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 754.285040] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 754.285494] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100798, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.285749] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a46adff1-c1b5-495d-b28d-c1152dd60950 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.322162] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 754.322162] env[63538]: value = "task-5100801" [ 754.322162] env[63538]: _type = "Task" [ 754.322162] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.332652] env[63538]: DEBUG oslo_concurrency.lockutils [req-92214238-0c6b-4549-97b7-8df59f663645 req-fd1543a9-5658-42f6-9afd-9478134d943b service nova] Releasing lock "refresh_cache-5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.332789] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100801, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.354927] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.376014] env[63538]: INFO nova.compute.manager [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Took 51.89 seconds to build instance. [ 754.557480] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.557480] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.589377] env[63538]: DEBUG nova.compute.manager [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Received event network-vif-plugged-2e0047f2-712d-4e63-b423-df4605d54382 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 754.590550] env[63538]: DEBUG oslo_concurrency.lockutils [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] Acquiring lock "707a79e2-f5db-479c-b719-1e040935cda3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.591164] env[63538]: DEBUG oslo_concurrency.lockutils [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] Lock "707a79e2-f5db-479c-b719-1e040935cda3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.591814] env[63538]: DEBUG oslo_concurrency.lockutils [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] Lock "707a79e2-f5db-479c-b719-1e040935cda3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.594047] env[63538]: DEBUG nova.compute.manager [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] No waiting events found 
dispatching network-vif-plugged-2e0047f2-712d-4e63-b423-df4605d54382 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 754.594047] env[63538]: WARNING nova.compute.manager [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Received unexpected event network-vif-plugged-2e0047f2-712d-4e63-b423-df4605d54382 for instance with vm_state building and task_state spawning. [ 754.594047] env[63538]: DEBUG nova.compute.manager [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Received event network-changed-2e0047f2-712d-4e63-b423-df4605d54382 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 754.594047] env[63538]: DEBUG nova.compute.manager [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Refreshing instance network info cache due to event network-changed-2e0047f2-712d-4e63-b423-df4605d54382. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 754.594047] env[63538]: DEBUG oslo_concurrency.lockutils [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] Acquiring lock "refresh_cache-707a79e2-f5db-479c-b719-1e040935cda3" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.594821] env[63538]: DEBUG oslo_concurrency.lockutils [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] Acquired lock "refresh_cache-707a79e2-f5db-479c-b719-1e040935cda3" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.595331] env[63538]: DEBUG nova.network.neutron [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Refreshing network info cache for port 2e0047f2-712d-4e63-b423-df4605d54382 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 754.708337] env[63538]: DEBUG nova.compute.utils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 754.710503] env[63538]: DEBUG nova.compute.manager [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 754.710503] env[63538]: DEBUG nova.network.neutron [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 754.716544] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6bbdfce1-6fcc-438b-8875-a0f444ff6987 tempest-ServersAdminTestJSON-243259323 tempest-ServersAdminTestJSON-243259323-project-admin] Releasing lock "refresh_cache-e50e95c0-830b-4d71-999b-546b138bf8f4" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.716544] env[63538]: DEBUG nova.compute.manager [None req-6bbdfce1-6fcc-438b-8875-a0f444ff6987 tempest-ServersAdminTestJSON-243259323 tempest-ServersAdminTestJSON-243259323-project-admin] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Inject network info {{(pid=63538) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7264}} [ 754.716544] env[63538]: DEBUG nova.compute.manager [None req-6bbdfce1-6fcc-438b-8875-a0f444ff6987 tempest-ServersAdminTestJSON-243259323 tempest-ServersAdminTestJSON-243259323-project-admin] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] network_info to inject: |[{"id": "facecb08-5587-4113-9009-ad339833d9ab", "address": "fa:16:3e:d8:f0:f8", "network": {"id": "2801e625-b23c-455a-80ae-80f2a4f34148", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1431071033-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c1f0c999ede418c866074d9276050ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfacecb08-55", "ovs_interfaceid": "facecb08-5587-4113-9009-ad339833d9ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7265}} [ 754.722472] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbdfce1-6fcc-438b-8875-a0f444ff6987 tempest-ServersAdminTestJSON-243259323 tempest-ServersAdminTestJSON-243259323-project-admin] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Reconfiguring VM instance to set the machine id {{(pid=63538) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1802}} [ 754.724159] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35b9ab56-4f7e-4750-a4c1-9960172ad6ba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.743846] env[63538]: DEBUG oslo_vmware.api [None req-6bbdfce1-6fcc-438b-8875-a0f444ff6987 tempest-ServersAdminTestJSON-243259323 tempest-ServersAdminTestJSON-243259323-project-admin] Waiting for the task: (returnval){ [ 
754.743846] env[63538]: value = "task-5100802" [ 754.743846] env[63538]: _type = "Task" [ 754.743846] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.760229] env[63538]: DEBUG oslo_vmware.api [None req-6bbdfce1-6fcc-438b-8875-a0f444ff6987 tempest-ServersAdminTestJSON-243259323 tempest-ServersAdminTestJSON-243259323-project-admin] Task: {'id': task-5100802, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.786639] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100798, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536512} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.786639] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b/5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 754.786894] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.787194] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a508f24d-c146-4192-8109-134db252d4c7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.799815] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 754.799815] env[63538]: value = "task-5100803" [ 754.799815] env[63538]: _type = "Task" [ 754.799815] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.816525] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100803, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.822140] env[63538]: DEBUG nova.policy [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd6de144ccc4498aa90ae01ca7a0f6f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d6954a5254f441ca256c85330297cef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 754.838208] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100801, 'name': CreateVM_Task, 'duration_secs': 0.408087} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.838208] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 754.838208] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.838208] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.838896] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 754.839235] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c93ec0e6-16ed-4752-8126-32bc3308494d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.849038] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 754.849038] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5251ebf7-3047-6f6f-a677-dfd1efe71135" [ 754.849038] env[63538]: _type = "Task" [ 754.849038] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.861594] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5251ebf7-3047-6f6f-a677-dfd1efe71135, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.880331] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3d6a9c75-88af-4a07-a8ef-ca91c8be6882 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "db5993ce-6982-4b82-8f5d-3fe51df8896b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.019397] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69e10f5a-db2b-4c1e-8e8f-64334d3b7379 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "04dc612b-7987-405b-9716-95c4ff3535ec" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.019397] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69e10f5a-db2b-4c1e-8e8f-64334d3b7379 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "04dc612b-7987-405b-9716-95c4ff3535ec" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.019645] env[63538]: DEBUG nova.compute.manager [None req-69e10f5a-db2b-4c1e-8e8f-64334d3b7379 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 755.020656] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babb8805-8e85-4a2e-9a05-4650cd09e272 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.032101] env[63538]: DEBUG nova.compute.manager [None req-69e10f5a-db2b-4c1e-8e8f-64334d3b7379 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63538) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 755.032101] env[63538]: DEBUG nova.objects.instance [None req-69e10f5a-db2b-4c1e-8e8f-64334d3b7379 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lazy-loading 'flavor' on Instance uuid 04dc612b-7987-405b-9716-95c4ff3535ec {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 755.227151] env[63538]: DEBUG nova.compute.manager [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Start building block 
device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 755.258569] env[63538]: DEBUG oslo_vmware.api [None req-6bbdfce1-6fcc-438b-8875-a0f444ff6987 tempest-ServersAdminTestJSON-243259323 tempest-ServersAdminTestJSON-243259323-project-admin] Task: {'id': task-5100802, 'name': ReconfigVM_Task, 'duration_secs': 0.270855} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.258976] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbdfce1-6fcc-438b-8875-a0f444ff6987 tempest-ServersAdminTestJSON-243259323 tempest-ServersAdminTestJSON-243259323-project-admin] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Reconfigured VM instance to set the machine id {{(pid=63538) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1805}} [ 755.312045] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100803, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.112229} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.316904] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.317416] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64eee77b-8dcc-4114-b16a-95aa7919e1de {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.343613] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b/5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.346740] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e5ecfb3-0514-47d8-8acb-51b1d16e058b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.373644] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5251ebf7-3047-6f6f-a677-dfd1efe71135, 'name': SearchDatastore_Task, 'duration_secs': 0.013214} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.378058] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.378058] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 755.378058] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.378058] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.378058] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 755.378058] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 755.378058] env[63538]: value = "task-5100804" [ 755.378058] env[63538]: _type = "Task" [ 755.378058] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.378058] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2a580da-77b5-465e-a89f-5476a6943d2e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.389078] env[63538]: DEBUG nova.compute.manager [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 755.399876] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100804, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.404682] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 755.404682] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 755.405847] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45601251-e4fa-45b4-838e-259376f31296 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.416209] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 755.416209] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525906ad-9330-f5aa-2bbc-0c64f09006b3" [ 755.416209] env[63538]: _type = "Task" [ 755.416209] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.240391] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-69e10f5a-db2b-4c1e-8e8f-64334d3b7379 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 756.246963] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525906ad-9330-f5aa-2bbc-0c64f09006b3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.247187] env[63538]: WARNING oslo_vmware.common.loopingcall [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] task run outlasted interval by 0.332113 sec [ 756.253184] env[63538]: DEBUG nova.network.neutron [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Successfully created port: a679ee9b-3e51-4ce7-ab24-0792218d36ba {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 756.255761] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8ec774e-e84d-4651-a8aa-4193bc51e7fb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.273506] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100804, 'name': ReconfigVM_Task, 'duration_secs': 0.806337} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.278310] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b/5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 756.279420] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=63538) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1373}} [ 756.279552] env[63538]: DEBUG oslo_vmware.api [None req-69e10f5a-db2b-4c1e-8e8f-64334d3b7379 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 756.279552] env[63538]: value = "task-5100805" [ 756.279552] env[63538]: _type = "Task" [ 756.279552] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.280130] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525906ad-9330-f5aa-2bbc-0c64f09006b3, 'name': SearchDatastore_Task, 'duration_secs': 0.011007} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.280354] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-e28a0ff1-4b1a-402f-b1f6-2c30548c5e4a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.284256] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.293606] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3336fcc1-0835-4654-a6c3-1e4c0765503e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.302374] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 756.302374] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522c80ad-cf92-0b2a-5cbc-e4709083b9a5" [ 756.302374] env[63538]: _type = "Task" [ 756.302374] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.302374] env[63538]: DEBUG oslo_vmware.api [None req-69e10f5a-db2b-4c1e-8e8f-64334d3b7379 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100805, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.304028] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 756.304028] env[63538]: value = "task-5100806" [ 756.304028] env[63538]: _type = "Task" [ 756.304028] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.322945] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100806, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.323743] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522c80ad-cf92-0b2a-5cbc-e4709083b9a5, 'name': SearchDatastore_Task, 'duration_secs': 0.010964} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.324944] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.324944] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 707a79e2-f5db-479c-b719-1e040935cda3/707a79e2-f5db-479c-b719-1e040935cda3.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 756.324944] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63c4f011-3358-415d-9d79-d5625bcb1447 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.334655] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 756.334655] env[63538]: value = "task-5100807" [ 756.334655] env[63538]: _type = "Task" [ 756.334655] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.346612] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100807, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.397734] env[63538]: DEBUG nova.network.neutron [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Updated VIF entry in instance network info cache for port 2e0047f2-712d-4e63-b423-df4605d54382. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 756.397959] env[63538]: DEBUG nova.network.neutron [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Updating instance_info_cache with network_info: [{"id": "2e0047f2-712d-4e63-b423-df4605d54382", "address": "fa:16:3e:aa:f2:b5", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e0047f2-71", "ovs_interfaceid": "2e0047f2-712d-4e63-b423-df4605d54382", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.750541] env[63538]: DEBUG nova.compute.manager [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 756.794285] env[63538]: DEBUG nova.virt.hardware [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 756.794647] env[63538]: DEBUG nova.virt.hardware [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 756.794886] env[63538]: DEBUG nova.virt.hardware [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 756.795201] env[63538]: DEBUG nova.virt.hardware [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 756.795326] env[63538]: DEBUG nova.virt.hardware [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 756.795471] env[63538]: DEBUG nova.virt.hardware [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 756.795766] env[63538]: DEBUG nova.virt.hardware [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 756.796465] env[63538]: DEBUG nova.virt.hardware [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 756.796465] env[63538]: DEBUG nova.virt.hardware [None 
req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 756.796465] env[63538]: DEBUG nova.virt.hardware [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 756.796705] env[63538]: DEBUG nova.virt.hardware [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 756.802046] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b50290f-e159-485b-a58a-46cf20712a04 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.804389] env[63538]: DEBUG oslo_vmware.api [None req-69e10f5a-db2b-4c1e-8e8f-64334d3b7379 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100805, 'name': PowerOffVM_Task, 'duration_secs': 0.27376} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.805862] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-69e10f5a-db2b-4c1e-8e8f-64334d3b7379 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 756.806113] env[63538]: DEBUG nova.compute.manager [None req-69e10f5a-db2b-4c1e-8e8f-64334d3b7379 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 756.807070] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03d5dd8-b094-49b0-8b8b-4a27f55765cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.811899] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00adb337-f64c-4838-b3a2-552129c333ab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.823280] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2523b346-de93-4b49-9925-5586dc1abe21 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.836694] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17679d1-4b63-44db-b07c-7b1189c9ecd7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.855742] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 
tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100806, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.063355} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.860239] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=63538) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1394}} [ 756.861793] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0411ebfb-3cdb-492c-9210-f8cc78d4a7cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.892476] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8440b8af-c49b-49a2-b7e7-c332d46a86cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.912839] env[63538]: DEBUG oslo_concurrency.lockutils [req-6c025540-2301-42bd-8d2f-d38b6ee0d468 req-7e3fae15-ca04-483d-a56a-ec904cc183b5 service nova] Releasing lock "refresh_cache-707a79e2-f5db-479c-b719-1e040935cda3" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.920787] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b/ephemeral_0.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 756.921233] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100807, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551744} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.922297] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf9d9ea4-4eec-4fe0-b53a-4fd9845bbc12 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.936170] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 707a79e2-f5db-479c-b719-1e040935cda3/707a79e2-f5db-479c-b719-1e040935cda3.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 756.936453] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 756.939360] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe2cfd7d-b0d2-4de2-a2dd-b7a684663434 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.943301] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1afb2b0-2e9a-4603-baa8-e7d5fea823b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.949899] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 756.949899] env[63538]: value = "task-5100808" [ 756.949899] env[63538]: _type = "Task" [ 756.949899] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.956460] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 756.956460] env[63538]: value = "task-5100809" [ 756.956460] env[63538]: _type = "Task" [ 756.956460] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.965938] env[63538]: DEBUG nova.compute.provider_tree [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.974335] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100808, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.980728] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100809, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.349641] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69e10f5a-db2b-4c1e-8e8f-64334d3b7379 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "04dc612b-7987-405b-9716-95c4ff3535ec" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.330s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.470246] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100808, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.475860] env[63538]: DEBUG nova.scheduler.client.report [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 757.486663] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100809, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079067} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.487021] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 757.487880] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb309e1-0c39-4205-89f2-d383dccaddc6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.514619] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 707a79e2-f5db-479c-b719-1e040935cda3/707a79e2-f5db-479c-b719-1e040935cda3.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 757.515530] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40764832-7b2b-4380-b64e-7c7d0f80e2d8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.535895] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 757.535895] env[63538]: value = "task-5100810" [ 757.535895] env[63538]: _type = "Task" [ 757.535895] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.545277] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100810, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.962085] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100808, 'name': ReconfigVM_Task, 'duration_secs': 0.908259} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.962395] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b/ephemeral_0.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 757.963092] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1481dde4-f8f1-4b5a-9472-51506ade8b06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.970777] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 757.970777] env[63538]: value = "task-5100811" [ 757.970777] env[63538]: _type = "Task" [ 757.970777] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.980528] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100811, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.981498] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.782s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.981995] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 757.985214] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.106s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.985534] env[63538]: DEBUG nova.objects.instance [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63538) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 758.049297] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100810, 'name': ReconfigVM_Task, 'duration_secs': 0.319505} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.049569] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 707a79e2-f5db-479c-b719-1e040935cda3/707a79e2-f5db-479c-b719-1e040935cda3.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 758.051908] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b61b9c3f-1e7e-40d9-855a-b584ee7c4cc6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.058701] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 758.058701] env[63538]: value = "task-5100812" [ 758.058701] env[63538]: _type = "Task" [ 758.058701] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.069945] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100812, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.197234] env[63538]: DEBUG nova.network.neutron [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Successfully updated port: a679ee9b-3e51-4ce7-ab24-0792218d36ba {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 758.447393] env[63538]: DEBUG nova.compute.manager [req-11dee65d-d650-4fdf-b106-e6427ceb2e57 req-7077fbfb-4303-4e47-bde8-d21e08f538c6 service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Received event network-vif-plugged-a679ee9b-3e51-4ce7-ab24-0792218d36ba {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 758.447723] env[63538]: DEBUG oslo_concurrency.lockutils [req-11dee65d-d650-4fdf-b106-e6427ceb2e57 req-7077fbfb-4303-4e47-bde8-d21e08f538c6 service nova] Acquiring lock "466be7db-79e4-49fd-aa3b-56fbe5c60457-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.448101] env[63538]: DEBUG oslo_concurrency.lockutils [req-11dee65d-d650-4fdf-b106-e6427ceb2e57 req-7077fbfb-4303-4e47-bde8-d21e08f538c6 service nova] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.448101] env[63538]: DEBUG oslo_concurrency.lockutils [req-11dee65d-d650-4fdf-b106-e6427ceb2e57 req-7077fbfb-4303-4e47-bde8-d21e08f538c6 service nova] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.448359] env[63538]: DEBUG nova.compute.manager [req-11dee65d-d650-4fdf-b106-e6427ceb2e57 req-7077fbfb-4303-4e47-bde8-d21e08f538c6 service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] No waiting events found dispatching network-vif-plugged-a679ee9b-3e51-4ce7-ab24-0792218d36ba {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 758.448612] env[63538]: WARNING nova.compute.manager [req-11dee65d-d650-4fdf-b106-e6427ceb2e57 req-7077fbfb-4303-4e47-bde8-d21e08f538c6 service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Received unexpected event network-vif-plugged-a679ee9b-3e51-4ce7-ab24-0792218d36ba for instance with vm_state building and task_state spawning. [ 758.483024] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100811, 'name': Rename_Task, 'duration_secs': 0.290531} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.483024] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 758.483294] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0dc49fc-bd8d-40cb-9d2a-3b1d9c1d0fd5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.486457] env[63538]: DEBUG nova.compute.utils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 758.487874] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 758.490896] env[63538]: DEBUG nova.network.neutron [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 758.499293] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 758.499293] env[63538]: value = "task-5100813" [ 758.499293] env[63538]: _type = "Task" [ 758.499293] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.512496] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100813, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.558318] env[63538]: DEBUG nova.policy [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '577a52928adf4587b963772b31a378cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d463d24e41b421eb7cb9d51ad207495', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 758.570752] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100812, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.704731] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.704731] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.704954] env[63538]: DEBUG nova.network.neutron [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 758.995213] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 759.003738] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fea40da8-602f-488d-b79e-4d2ffd718497 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.005170] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.698s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.006515] env[63538]: DEBUG nova.objects.instance [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Lazy-loading 'resources' on Instance uuid 43729260-d138-4e62-9cc5-4db3ca39f5d2 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 759.021265] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100813, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.028447] env[63538]: DEBUG nova.network.neutron [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Successfully created port: cb36613d-3fcd-42c3-9f60-e642855df901 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 759.070821] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100812, 'name': Rename_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.098689] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.098962] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.268849] env[63538]: DEBUG nova.network.neutron [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.519804] env[63538]: DEBUG oslo_vmware.api [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100813, 'name': PowerOnVM_Task, 'duration_secs': 0.722879} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.523027] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 759.523027] env[63538]: INFO nova.compute.manager [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Took 10.76 seconds to spawn the instance on the hypervisor. 
[ 759.523027] env[63538]: DEBUG nova.compute.manager [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 759.523027] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3265cdee-8bc5-415b-a31e-e9ec99d6cf40 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.572677] env[63538]: DEBUG nova.network.neutron [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance_info_cache with network_info: [{"id": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "address": "fa:16:3e:08:aa:47", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa679ee9b-3e", "ovs_interfaceid": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.581920] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100812, 'name': Rename_Task, 'duration_secs': 1.146231} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.585301] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 759.585642] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f22c04fd-4422-4bb8-965c-5e2fa92bbe79 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.594270] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 759.594270] env[63538]: value = "task-5100814" [ 759.594270] env[63538]: _type = "Task" [ 759.594270] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.608211] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100814, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.782297] env[63538]: DEBUG nova.compute.manager [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 759.784417] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f5f92d-f95b-4653-8fe9-7f5ab6729b54 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.015922] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 760.043219] env[63538]: INFO nova.compute.manager [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Took 43.88 seconds to build instance. 
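The nova.virt.hardware entries above (and repeated below for the next instance) walk through CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits set, the maxima default to 65536 sockets/cores/threads, and the only exact factorization of 1 vCPU is 1:1:1, hence "Got 1 possible topologies". The sketch below is a simplified take on just that enumeration step; Topology and possible_topologies are illustrative names, and the preference-based sorting the real code applies is omitted.

from dataclasses import dataclass

@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus: int, max_sockets: int, max_cores: int,
                        max_threads: int) -> list[Topology]:
    """Enumerate exact factorizations of vcpus into sockets*cores*threads
    that respect the given limits (simplified illustration)."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append(Topology(sockets, cores, threads))
    return found

# 1 vCPU with the default 65536 limits, as in the log:
print(possible_topologies(1, 65536, 65536, 65536))
# -> [Topology(sockets=1, cores=1, threads=1)]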
[ 760.058691] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 760.059123] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 760.059746] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.059746] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 760.059746] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.059950] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 760.060347] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 760.060347] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 760.060449] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 
tempest-MultipleCreateTestJSON-1148870144-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 760.060596] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 760.060759] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 760.061778] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8139e969-c775-4774-8139-60a92d1bfe76 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.076580] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac775511-6e5e-49d1-b564-10c217a033f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.082032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.082352] env[63538]: DEBUG nova.compute.manager [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Instance network_info: |[{"id": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "address": "fa:16:3e:08:aa:47", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa679ee9b-3e", "ovs_interfaceid": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 760.085962] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] 
[instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:aa:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f39e3b37-7906-4bbc-820e-ceac74e4d827', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a679ee9b-3e51-4ce7-ab24-0792218d36ba', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 760.094213] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Creating folder: Project (0d6954a5254f441ca256c85330297cef). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 760.095667] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a491072c-25da-439b-8163-7ed19e0e1d6f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.120832] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100814, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.124286] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Created folder: Project (0d6954a5254f441ca256c85330297cef) in parent group-v992234. [ 760.124638] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Creating folder: Instances. Parent ref: group-v992359. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 760.125442] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e1f50c3-79eb-4d14-937f-f67abb1465e9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.138169] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Created folder: Instances in parent group-v992359. [ 760.138690] env[63538]: DEBUG oslo.service.loopingcall [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 760.138785] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 760.138992] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8dfcb04a-139e-4503-90f9-325fdd639960 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.166489] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 760.166489] env[63538]: value = "task-5100817" [ 760.166489] env[63538]: _type = "Task" [ 760.166489] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.179766] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100817, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.228721] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90877188-eb94-4660-87c8-4ad6c9053b30 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.238905] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2ed3b1-9f64-4c80-ad6e-1be1772c8318 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.275055] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac00158-5a90-46fc-adb8-e022084a504e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.286104] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f77d7b-59e1-4671-985c-8bd512864d6a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.304780] env[63538]: INFO nova.compute.manager [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] instance snapshotting [ 760.305203] env[63538]: WARNING nova.compute.manager [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 760.307957] env[63538]: DEBUG nova.compute.provider_tree [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.313176] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3260c31-7709-425e-ad55-ac041641ecdd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.336248] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f79049de-5916-4ab1-9038-4a1bdf154088 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.545536] env[63538]: DEBUG oslo_concurrency.lockutils [None req-692bb16d-b170-4714-8e6c-9aeb20ebadf5 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.943s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.612295] env[63538]: DEBUG oslo_vmware.api [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100814, 'name': PowerOnVM_Task, 'duration_secs': 0.795311} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.612295] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 760.612845] env[63538]: INFO nova.compute.manager [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Took 9.58 seconds to spawn the instance on the hypervisor. [ 760.614024] env[63538]: DEBUG nova.compute.manager [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 760.614600] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f535de6c-149a-40cf-a67f-d92ead4bfc52 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.685511] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100817, 'name': CreateVM_Task, 'duration_secs': 0.4713} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.685511] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 760.685511] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.685511] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.685511] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 760.685994] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-783a6861-05ee-488e-a80e-d65e3c29d8e5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.692199] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 760.692199] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5231ee8a-a107-5c9c-2acb-cef3f62c62a3" [ 760.692199] env[63538]: _type = "Task" [ 760.692199] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.701825] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5231ee8a-a107-5c9c-2acb-cef3f62c62a3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.815902] env[63538]: DEBUG nova.scheduler.client.report [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 760.850240] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 760.850569] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-988aa8f1-46f9-47d3-a624-220f840152df {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.860663] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 760.860663] env[63538]: value = "task-5100818" [ 760.860663] env[63538]: _type = "Task" [ 760.860663] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.870854] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100818, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.966950] env[63538]: DEBUG nova.network.neutron [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Successfully updated port: cb36613d-3fcd-42c3-9f60-e642855df901 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 761.054024] env[63538]: DEBUG nova.compute.manager [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 761.135558] env[63538]: INFO nova.compute.manager [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Took 44.44 seconds to build instance. 
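The recurring records of the form "Task: {'id': task-5100818, 'name': CreateSnapshot_Task} progress is 0%" that later resolve to "completed successfully" come from a wait-for-task poll loop: the driver submits a vCenter task, then re-reads its state on a fixed interval until it reaches a terminal state. A minimal sketch of that pattern (illustrative only, not the oslo.vmware implementation; poll_fn and the state/progress keys are assumed stand-ins for the real task info):

    import time

    def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
        """Poll a task until it reports success or error.

        poll_fn is a hypothetical callable returning a dict such as
        {'state': 'running', 'progress': 40} or
        {'state': 'success', 'duration_secs': 0.47}.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_fn()
            if info['state'] == 'success':
                return info          # corresponds to "completed successfully"
            if info['state'] == 'error':
                raise RuntimeError('task failed: %s' % info.get('error'))
            # corresponds to the intermediate "progress is N%" records
            time.sleep(interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)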
[ 761.159442] env[63538]: DEBUG nova.compute.manager [req-eb38d7e5-1dd6-47ed-8324-02e774656238 req-8e92a768-2ca9-4cae-bcc3-e753e02e9abe service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Received event network-changed-a679ee9b-3e51-4ce7-ab24-0792218d36ba {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 761.159666] env[63538]: DEBUG nova.compute.manager [req-eb38d7e5-1dd6-47ed-8324-02e774656238 req-8e92a768-2ca9-4cae-bcc3-e753e02e9abe service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Refreshing instance network info cache due to event network-changed-a679ee9b-3e51-4ce7-ab24-0792218d36ba. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 761.159895] env[63538]: DEBUG oslo_concurrency.lockutils [req-eb38d7e5-1dd6-47ed-8324-02e774656238 req-8e92a768-2ca9-4cae-bcc3-e753e02e9abe service nova] Acquiring lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.160059] env[63538]: DEBUG oslo_concurrency.lockutils [req-eb38d7e5-1dd6-47ed-8324-02e774656238 req-8e92a768-2ca9-4cae-bcc3-e753e02e9abe service nova] Acquired lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.160247] env[63538]: DEBUG nova.network.neutron [req-eb38d7e5-1dd6-47ed-8324-02e774656238 req-8e92a768-2ca9-4cae-bcc3-e753e02e9abe service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Refreshing network info cache for port a679ee9b-3e51-4ce7-ab24-0792218d36ba {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 761.204779] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5231ee8a-a107-5c9c-2acb-cef3f62c62a3, 'name': SearchDatastore_Task, 'duration_secs': 0.016104} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.205223] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.205498] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.205789] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.206998] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.206998] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.206998] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28ed7c46-655a-4a17-86da-e08df7bdd680 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.216654] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.216856] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 761.217650] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f630dbbc-7f10-47c2-8ca3-a62446fb73c2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.223476] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 761.223476] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52850110-c090-0685-5c94-d33fe98d8e83" [ 761.223476] env[63538]: _type = "Task" [ 761.223476] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.233244] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52850110-c090-0685-5c94-d33fe98d8e83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.305394] env[63538]: DEBUG nova.compute.manager [req-3c44fc20-917f-4a88-8946-4b70df9e3a3b req-abf3eb86-c1b5-453e-bdbd-85c4f09c79c2 service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Received event network-changed-6d45b11c-cd3e-4589-9931-5ffdbbc4e193 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 761.306314] env[63538]: DEBUG nova.compute.manager [req-3c44fc20-917f-4a88-8946-4b70df9e3a3b req-abf3eb86-c1b5-453e-bdbd-85c4f09c79c2 service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Refreshing instance network info cache due to event network-changed-6d45b11c-cd3e-4589-9931-5ffdbbc4e193. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 761.306314] env[63538]: DEBUG oslo_concurrency.lockutils [req-3c44fc20-917f-4a88-8946-4b70df9e3a3b req-abf3eb86-c1b5-453e-bdbd-85c4f09c79c2 service nova] Acquiring lock "refresh_cache-5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.306314] env[63538]: DEBUG oslo_concurrency.lockutils [req-3c44fc20-917f-4a88-8946-4b70df9e3a3b req-abf3eb86-c1b5-453e-bdbd-85c4f09c79c2 service nova] Acquired lock "refresh_cache-5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.306314] env[63538]: DEBUG nova.network.neutron [req-3c44fc20-917f-4a88-8946-4b70df9e3a3b req-abf3eb86-c1b5-453e-bdbd-85c4f09c79c2 service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Refreshing network info cache for port 6d45b11c-cd3e-4589-9931-5ffdbbc4e193 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 761.322451] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.317s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.325609] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.406s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.325892] env[63538]: DEBUG nova.objects.instance [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Lazy-loading 'resources' on Instance uuid bb56950a-3e25-4fb9-9f84-f735e26adc42 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 761.350615] env[63538]: INFO nova.scheduler.client.report [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Deleted allocations for instance 43729260-d138-4e62-9cc5-4db3ca39f5d2 [ 761.374357] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100818, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.470750] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "refresh_cache-46e2c1f4-edf7-45d6-ba77-c872005fcf1b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.470966] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired lock "refresh_cache-46e2c1f4-edf7-45d6-ba77-c872005fcf1b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.471181] env[63538]: DEBUG nova.network.neutron [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 761.524311] env[63538]: INFO nova.compute.manager [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Rebuilding instance [ 761.572467] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.573315] env[63538]: DEBUG nova.compute.manager [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 761.574234] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dafcd5-12a7-46e1-b0cc-46e225355cb8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.637189] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48dee608-ece8-433f-9ab1-b575e0bdb7f7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "707a79e2-f5db-479c-b719-1e040935cda3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.780s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.736135] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52850110-c090-0685-5c94-d33fe98d8e83, 'name': SearchDatastore_Task, 'duration_secs': 0.03565} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.736525] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca7b1662-a285-41da-a327-f1d970ce2e40 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.743626] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 761.743626] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bafc14-5f08-2df1-0e9e-3850803f436a" [ 761.743626] env[63538]: _type = "Task" [ 761.743626] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.754208] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bafc14-5f08-2df1-0e9e-3850803f436a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.860075] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee6c68fb-35a0-4dd4-86c4-5bb1c5060236 tempest-AttachInterfacesUnderV243Test-1683398079 tempest-AttachInterfacesUnderV243Test-1683398079-project-member] Lock "43729260-d138-4e62-9cc5-4db3ca39f5d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.800s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.879506] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100818, 'name': CreateSnapshot_Task, 'duration_secs': 1.004509} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.879958] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 761.882024] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e066b1-fad0-4078-9023-05bf52910316 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.960570] env[63538]: DEBUG nova.compute.manager [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 761.961626] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8eb4c3-3484-42f5-83f8-969d383b3688 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.967298] env[63538]: DEBUG oslo_vmware.rw_handles [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ebe39-e366-15f6-47d1-3f6ffbb0df2f/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 761.968224] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d995aaf-434d-4a13-bbb3-5ffbecaf360e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.985879] env[63538]: DEBUG oslo_vmware.rw_handles [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ebe39-e366-15f6-47d1-3f6ffbb0df2f/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 761.985879] env[63538]: ERROR oslo_vmware.rw_handles [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ebe39-e366-15f6-47d1-3f6ffbb0df2f/disk-0.vmdk due to incomplete transfer. [ 761.985879] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-35878f72-ebf9-43ca-b29e-ffa5c23b7a4e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.995343] env[63538]: DEBUG oslo_vmware.rw_handles [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522ebe39-e366-15f6-47d1-3f6ffbb0df2f/disk-0.vmdk. 
{{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 761.995617] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Uploaded image 29f321f7-1ca8-4699-a767-09b4915b3bd0 to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 761.998481] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 762.003191] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f882601b-3cb0-4939-a04f-aa4684f9cc42 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.010690] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 762.010690] env[63538]: value = "task-5100819" [ 762.010690] env[63538]: _type = "Task" [ 762.010690] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.021199] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100819, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.026605] env[63538]: DEBUG nova.network.neutron [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 762.086664] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 762.087269] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3720d204-8364-4ae7-a306-52e08ac49da2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.101642] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 762.101642] env[63538]: value = "task-5100820" [ 762.101642] env[63538]: _type = "Task" [ 762.101642] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.112956] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100820, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.143074] env[63538]: DEBUG nova.compute.manager [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 762.255411] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bafc14-5f08-2df1-0e9e-3850803f436a, 'name': SearchDatastore_Task, 'duration_secs': 0.013889} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.255926] env[63538]: DEBUG nova.network.neutron [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Updating instance_info_cache with network_info: [{"id": "cb36613d-3fcd-42c3-9f60-e642855df901", "address": "fa:16:3e:ea:e1:78", "network": {"id": "106e253d-ba43-4eb6-a423-ed6a6d4156aa", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-330603874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d463d24e41b421eb7cb9d51ad207495", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb36613d-3f", "ovs_interfaceid": "cb36613d-3fcd-42c3-9f60-e642855df901", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.260143] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.260431] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Copying Virtual Disk [datastore2] 
devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 466be7db-79e4-49fd-aa3b-56fbe5c60457/466be7db-79e4-49fd-aa3b-56fbe5c60457.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 762.261389] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc0a447a-c01d-415c-9394-588ddc0d5ef8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.270446] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 762.270446] env[63538]: value = "task-5100821" [ 762.270446] env[63538]: _type = "Task" [ 762.270446] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.285064] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100821, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.329874] env[63538]: DEBUG nova.network.neutron [req-eb38d7e5-1dd6-47ed-8324-02e774656238 req-8e92a768-2ca9-4cae-bcc3-e753e02e9abe service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updated VIF entry in instance network info cache for port a679ee9b-3e51-4ce7-ab24-0792218d36ba. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 762.329874] env[63538]: DEBUG nova.network.neutron [req-eb38d7e5-1dd6-47ed-8324-02e774656238 req-8e92a768-2ca9-4cae-bcc3-e753e02e9abe service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance_info_cache with network_info: [{"id": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "address": "fa:16:3e:08:aa:47", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa679ee9b-3e", "ovs_interfaceid": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.412993] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Creating linked-clone 
VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 762.420308] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-651ef144-75bb-41c5-86c5-5aaf992df93a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.431291] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 762.431291] env[63538]: value = "task-5100822" [ 762.431291] env[63538]: _type = "Task" [ 762.431291] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.444179] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100822, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.486775] env[63538]: INFO nova.compute.manager [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] instance snapshotting [ 762.490765] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342bf184-c013-4d78-a80e-d3f34ba9ae80 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.518403] env[63538]: DEBUG nova.network.neutron [req-3c44fc20-917f-4a88-8946-4b70df9e3a3b req-abf3eb86-c1b5-453e-bdbd-85c4f09c79c2 service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Updated VIF entry in instance network info cache for port 6d45b11c-cd3e-4589-9931-5ffdbbc4e193. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 762.518823] env[63538]: DEBUG nova.network.neutron [req-3c44fc20-917f-4a88-8946-4b70df9e3a3b req-abf3eb86-c1b5-453e-bdbd-85c4f09c79c2 service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Updating instance_info_cache with network_info: [{"id": "6d45b11c-cd3e-4589-9931-5ffdbbc4e193", "address": "fa:16:3e:cd:d2:4d", "network": {"id": "c0e827ce-4abc-4178-9811-36625c1d6ebc", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1894083828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e6480e3bc216427d939223b9e3b6a21b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69351262-8d39-441a-85ba-3a78df436d17", "external-id": "nsx-vlan-transportzone-205", "segmentation_id": 205, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d45b11c-cd", "ovs_interfaceid": "6d45b11c-cd3e-4589-9931-5ffdbbc4e193", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.526396] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f9ae98-f9ee-4168-a3b4-14576717c834 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.538757] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100819, 'name': Destroy_Task, 'duration_secs': 0.329342} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.541805] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Destroyed the VM [ 762.542344] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 762.546772] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-272db416-a955-4002-884c-8337346a888e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.549472] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7fe495-1dd1-4ab3-85c3-f43a1421d6b3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.560808] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2ce73d-6d57-4e84-99ad-77ee6024441f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.564892] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 762.564892] env[63538]: value = "task-5100823" [ 762.564892] env[63538]: _type = "Task" [ 762.564892] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.602339] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1031fb-d107-4812-ab7a-1684231bdbb6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.609330] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100823, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.624113] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059494b5-384f-42d5-b6ed-d95f7d36192c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.629073] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100820, 'name': PowerOffVM_Task, 'duration_secs': 0.43386} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.630313] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 762.630492] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 762.632055] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2def73e6-3323-41f4-84c0-4f7171b7e041 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.645250] env[63538]: DEBUG nova.compute.provider_tree [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.653151] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 762.655617] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea7ece5e-8d56-4038-9ec7-bc097d81d17a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.668908] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.762718] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Releasing lock "refresh_cache-46e2c1f4-edf7-45d6-ba77-c872005fcf1b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.763261] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Instance network_info: |[{"id": "cb36613d-3fcd-42c3-9f60-e642855df901", "address": "fa:16:3e:ea:e1:78", "network": {"id": "106e253d-ba43-4eb6-a423-ed6a6d4156aa", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-330603874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d463d24e41b421eb7cb9d51ad207495", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb36613d-3f", "ovs_interfaceid": "cb36613d-3fcd-42c3-9f60-e642855df901", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 762.763924] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:e1:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bec903a9-d773-4d7c-a80c-c2533be346fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb36613d-3fcd-42c3-9f60-e642855df901', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 762.773474] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Creating folder: Project (5d463d24e41b421eb7cb9d51ad207495). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 762.774015] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab0ce33a-38f4-424b-a7f8-58c2935d1c47 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.790133] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 762.790133] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 762.790133] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleting the datastore file [datastore2] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.790133] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4bb00211-5a4b-4480-8687-511fa1a43d0e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.799722] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Created folder: Project (5d463d24e41b421eb7cb9d51ad207495) in parent group-v992234. [ 762.799722] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Creating folder: Instances. Parent ref: group-v992364. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 762.804108] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-91590e9d-af08-4e80-bbfa-993ea378289a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.807860] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100821, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.810770] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 762.810770] env[63538]: value = "task-5100826" [ 762.810770] env[63538]: _type = "Task" [ 762.810770] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.825806] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100826, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.828487] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Created folder: Instances in parent group-v992364. [ 762.829038] env[63538]: DEBUG oslo.service.loopingcall [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 762.829355] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 762.829598] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6901bf7e-eaeb-441d-bcf2-d8509bdb6136 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.847972] env[63538]: DEBUG oslo_concurrency.lockutils [req-eb38d7e5-1dd6-47ed-8324-02e774656238 req-8e92a768-2ca9-4cae-bcc3-e753e02e9abe service nova] Releasing lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.857415] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 762.857415] env[63538]: value = "task-5100828" [ 762.857415] env[63538]: _type = "Task" [ 762.857415] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.868813] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100828, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.946162] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100822, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.031107] env[63538]: DEBUG oslo_concurrency.lockutils [req-3c44fc20-917f-4a88-8946-4b70df9e3a3b req-abf3eb86-c1b5-453e-bdbd-85c4f09c79c2 service nova] Releasing lock "refresh_cache-5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.054402] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 763.054975] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-dce79a32-23dc-4a46-a0c2-b43231c4a47d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.067013] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 763.067013] env[63538]: value = "task-5100829" [ 763.067013] env[63538]: _type = "Task" [ 763.067013] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.083393] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100823, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.086946] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100829, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.152066] env[63538]: DEBUG nova.scheduler.client.report [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 763.286578] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100821, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.818987} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.286578] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 466be7db-79e4-49fd-aa3b-56fbe5c60457/466be7db-79e4-49fd-aa3b-56fbe5c60457.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 763.286578] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 763.287098] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-918008e8-fa5e-46ee-97af-0eaa00f80fc5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.290823] env[63538]: INFO nova.compute.manager [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Rebuilding instance [ 763.295198] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 763.295198] env[63538]: value = "task-5100830" [ 763.295198] env[63538]: _type = "Task" [ 763.295198] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.309670] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100830, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.328067] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100826, 'name': DeleteDatastoreFile_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.353108] env[63538]: DEBUG nova.compute.manager [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 763.355156] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b373947-1f38-4963-a88e-4d50b65a0418 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.370046] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100828, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.388359] env[63538]: DEBUG nova.compute.manager [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Received event network-vif-plugged-cb36613d-3fcd-42c3-9f60-e642855df901 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 763.388649] env[63538]: DEBUG oslo_concurrency.lockutils [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] Acquiring lock "46e2c1f4-edf7-45d6-ba77-c872005fcf1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.388915] env[63538]: DEBUG oslo_concurrency.lockutils [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] Lock "46e2c1f4-edf7-45d6-ba77-c872005fcf1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.389336] env[63538]: DEBUG oslo_concurrency.lockutils [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] Lock "46e2c1f4-edf7-45d6-ba77-c872005fcf1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.389676] env[63538]: DEBUG nova.compute.manager [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] No waiting events found dispatching network-vif-plugged-cb36613d-3fcd-42c3-9f60-e642855df901 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 763.389970] env[63538]: WARNING nova.compute.manager [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Received unexpected event network-vif-plugged-cb36613d-3fcd-42c3-9f60-e642855df901 for instance with vm_state building and task_state spawning. [ 763.390260] env[63538]: DEBUG nova.compute.manager [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Received event network-changed-cb36613d-3fcd-42c3-9f60-e642855df901 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 763.390425] env[63538]: DEBUG nova.compute.manager [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Refreshing instance network info cache due to event network-changed-cb36613d-3fcd-42c3-9f60-e642855df901. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 763.390652] env[63538]: DEBUG oslo_concurrency.lockutils [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] Acquiring lock "refresh_cache-46e2c1f4-edf7-45d6-ba77-c872005fcf1b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.390834] env[63538]: DEBUG oslo_concurrency.lockutils [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] Acquired lock "refresh_cache-46e2c1f4-edf7-45d6-ba77-c872005fcf1b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.391261] env[63538]: DEBUG nova.network.neutron [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Refreshing network info cache for port cb36613d-3fcd-42c3-9f60-e642855df901 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 763.444797] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100822, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.578404] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100823, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.587362] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100829, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.658428] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.333s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.661029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.690s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.661195] env[63538]: DEBUG nova.objects.instance [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lazy-loading 'resources' on Instance uuid 6f29f063-ddb5-491a-a1a0-7c9ed65a1718 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 763.679478] env[63538]: INFO nova.scheduler.client.report [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Deleted allocations for instance bb56950a-3e25-4fb9-9f84-f735e26adc42 [ 763.807123] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100830, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095324} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.807675] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 763.809046] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec87e3c-5e7b-4c1b-9e31-a4d6ac5494db {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.843083] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 466be7db-79e4-49fd-aa3b-56fbe5c60457/466be7db-79e4-49fd-aa3b-56fbe5c60457.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 763.846869] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ccd7132-3cc8-4164-b057-8c11105b97dd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.862014] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100826, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.515338} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.862638] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.862795] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 763.862913] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 763.877091] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 763.877388] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100828, 'name': CreateVM_Task, 'duration_secs': 0.630974} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.878844] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c4ffdaf-498d-4155-9515-13419501b3b6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.880508] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 763.881477] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 763.881477] env[63538]: value = "task-5100831" [ 763.881477] env[63538]: _type = "Task" [ 763.881477] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.881663] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.881754] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.882102] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 763.882830] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd8badfc-3c4c-46f4-a920-266eec8a6afc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.889739] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 763.889739] env[63538]: value = "task-5100832" [ 763.889739] env[63538]: _type = "Task" [ 763.889739] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.901775] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100831, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.902176] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 763.902176] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fe3ba3-6ce4-84bf-5ecb-89110cb0ffee" [ 763.902176] env[63538]: _type = "Task" [ 763.902176] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.911804] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100832, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.918975] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fe3ba3-6ce4-84bf-5ecb-89110cb0ffee, 'name': SearchDatastore_Task, 'duration_secs': 0.016058} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.920225] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.920531] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 763.920866] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.921152] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.921229] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 763.921929] env[63538]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22dab636-37ed-435f-b471-f9cfc524c593 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.933811] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 763.933811] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 763.933811] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15cbc120-9881-4ba6-9f0c-ef6629146f56 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.946852] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100822, 'name': CloneVM_Task, 'duration_secs': 1.515749} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.948063] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Created linked-clone VM from snapshot [ 763.948458] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 763.948458] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527726ad-6b0f-a64b-1aa4-01d56d1a4833" [ 763.948458] env[63538]: _type = "Task" [ 763.948458] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.949266] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982e8042-3331-477e-8224-dccee87a3b6d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.965025] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Uploading image 84b51683-af18-40a9-9328-0e3969474fb9 {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 763.971443] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527726ad-6b0f-a64b-1aa4-01d56d1a4833, 'name': SearchDatastore_Task, 'duration_secs': 0.014602} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.972479] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6eb1b988-39c6-483f-b5cf-66e1dba196b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.983340] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 763.983340] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f629ff-774d-b237-217d-2cd2f3dabb50" [ 763.983340] env[63538]: _type = "Task" [ 763.983340] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.994771] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f629ff-774d-b237-217d-2cd2f3dabb50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.004637] env[63538]: DEBUG oslo_vmware.rw_handles [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 764.004637] env[63538]: value = "vm-992363" [ 764.004637] env[63538]: _type = "VirtualMachine" [ 764.004637] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 764.004982] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d12733fe-f385-4513-a53d-609b67feb383 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.013327] env[63538]: DEBUG oslo_vmware.rw_handles [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lease: (returnval){ [ 764.013327] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5246358c-e2b8-6650-5b74-fa3747369a1a" [ 764.013327] env[63538]: _type = "HttpNfcLease" [ 764.013327] env[63538]: } obtained for exporting VM: (result){ [ 764.013327] env[63538]: value = "vm-992363" [ 764.013327] env[63538]: _type = "VirtualMachine" [ 764.013327] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 764.013686] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the lease: (returnval){ [ 764.013686] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5246358c-e2b8-6650-5b74-fa3747369a1a" [ 764.013686] env[63538]: _type = "HttpNfcLease" [ 764.013686] env[63538]: } to be ready. 
{{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 764.022777] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 764.022777] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5246358c-e2b8-6650-5b74-fa3747369a1a" [ 764.022777] env[63538]: _type = "HttpNfcLease" [ 764.022777] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 764.078242] env[63538]: DEBUG oslo_vmware.api [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100823, 'name': RemoveSnapshot_Task, 'duration_secs': 1.044051} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.078520] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 764.078792] env[63538]: INFO nova.compute.manager [None req-233a2ab7-e91a-4506-a843-366d439fcbe0 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Took 18.25 seconds to snapshot the instance on the hypervisor. [ 764.094920] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100829, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.189367] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a87b4777-1fdc-40af-9385-761fc9b91664 tempest-ServersAaction247Test-324217228 tempest-ServersAaction247Test-324217228-project-member] Lock "bb56950a-3e25-4fb9-9f84-f735e26adc42" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.172s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.348304] env[63538]: DEBUG nova.network.neutron [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Updated VIF entry in instance network info cache for port cb36613d-3fcd-42c3-9f60-e642855df901. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 764.348755] env[63538]: DEBUG nova.network.neutron [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Updating instance_info_cache with network_info: [{"id": "cb36613d-3fcd-42c3-9f60-e642855df901", "address": "fa:16:3e:ea:e1:78", "network": {"id": "106e253d-ba43-4eb6-a423-ed6a6d4156aa", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-330603874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d463d24e41b421eb7cb9d51ad207495", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb36613d-3f", "ovs_interfaceid": "cb36613d-3fcd-42c3-9f60-e642855df901", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.408196] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100831, 'name': ReconfigVM_Task, 'duration_secs': 0.479339} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.410940] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 466be7db-79e4-49fd-aa3b-56fbe5c60457/466be7db-79e4-49fd-aa3b-56fbe5c60457.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 764.411878] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100832, 'name': PowerOffVM_Task, 'duration_secs': 0.24223} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.411955] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49e7abbe-9352-499c-a91a-c695d87f518f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.414888] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 764.414888] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 764.416722] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b78ab1-df9a-42ac-8318-86b18e614a90 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.426644] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 764.427980] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de7ddb79-38fa-4a5c-a5ca-e295235ef4d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.430587] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 764.430587] env[63538]: value = "task-5100834" [ 764.430587] env[63538]: _type = "Task" [ 764.430587] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.445391] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100834, 'name': Rename_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.503202] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f629ff-774d-b237-217d-2cd2f3dabb50, 'name': SearchDatastore_Task, 'duration_secs': 0.014176} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.507509] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.507814] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 46e2c1f4-edf7-45d6-ba77-c872005fcf1b/46e2c1f4-edf7-45d6-ba77-c872005fcf1b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 764.508156] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 764.508345] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 764.508543] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleting the datastore file [datastore1] 707a79e2-f5db-479c-b719-1e040935cda3 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.509412] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-149fb282-ff8c-4ee3-a6b1-888edf53145f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.511987] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47a72abe-dd3c-4ceb-8e4e-fecddfa596bf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.521952] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 764.521952] env[63538]: value = "task-5100836" [ 764.521952] env[63538]: _type = "Task" [ 764.521952] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.523969] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 764.523969] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5246358c-e2b8-6650-5b74-fa3747369a1a" [ 764.523969] env[63538]: _type = "HttpNfcLease" [ 764.523969] env[63538]: } is ready. 
{{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 764.528404] env[63538]: DEBUG oslo_vmware.rw_handles [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 764.528404] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5246358c-e2b8-6650-5b74-fa3747369a1a" [ 764.528404] env[63538]: _type = "HttpNfcLease" [ 764.528404] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 764.528862] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 764.528862] env[63538]: value = "task-5100837" [ 764.528862] env[63538]: _type = "Task" [ 764.528862] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.532274] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02f5195-769f-4b12-b439-ae237579d6f3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.547456] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100836, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.552933] env[63538]: DEBUG oslo_vmware.rw_handles [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b9c76e-274a-0d4e-ed48-9f20a6a40ad7/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 764.552933] env[63538]: DEBUG oslo_vmware.rw_handles [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b9c76e-274a-0d4e-ed48-9f20a6a40ad7/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 764.555279] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100837, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.633768] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100829, 'name': CreateSnapshot_Task, 'duration_secs': 1.33745} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.637040] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 764.638169] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3367cd0a-a4ec-4054-b561-90cba281d869 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.669735] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c16ecc29-bfb0-49aa-881f-f31b9673b3dd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.853327] env[63538]: DEBUG oslo_concurrency.lockutils [req-21e29e44-9bdc-46e8-b18a-9173f072256c req-64811df2-6ae4-47a1-a696-2c458cb223f7 service nova] Releasing lock "refresh_cache-46e2c1f4-edf7-45d6-ba77-c872005fcf1b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.915562] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770a428a-6752-49ce-9b39-433be28aaf67 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.920781] env[63538]: DEBUG nova.virt.hardware [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 764.921219] env[63538]: DEBUG nova.virt.hardware [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 764.921426] env[63538]: DEBUG nova.virt.hardware [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.921653] env[63538]: DEBUG nova.virt.hardware [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 764.921879] env[63538]: DEBUG nova.virt.hardware [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.922094] env[63538]: DEBUG nova.virt.hardware [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 764.922374] env[63538]: DEBUG nova.virt.hardware [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 764.922583] env[63538]: DEBUG nova.virt.hardware [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 764.922820] env[63538]: DEBUG nova.virt.hardware [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 764.923054] env[63538]: DEBUG nova.virt.hardware [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 764.923278] env[63538]: DEBUG nova.virt.hardware [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 764.924474] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f30ceba-3c3e-445e-b816-fdd4e3f6497d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.942968] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185cefd2-048d-41f0-ba33-81a2fc3e6104 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.947892] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac241df4-de18-4e55-902b-6deb30a8066d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.956069] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100834, 'name': Rename_Task, 'duration_secs': 0.186285} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.957502] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 764.959597] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea485e2c-c2c5-4406-b011-19862607b4a1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.002343] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ebd3b8-6292-4103-b5cf-1b4495eff080 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.005759] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 765.005759] env[63538]: value = "task-5100838" [ 765.005759] env[63538]: _type = "Task" [ 765.005759] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.006473] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:60:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6edb8eae-1113-49d0-84f7-9fd9f82b26fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d80ee33-5e67-4651-a9b1-1f58ca92fb2e', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 765.017229] env[63538]: DEBUG oslo.service.loopingcall [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.017229] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 765.019359] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e48d0d10-d5e7-470c-b3fc-c90f3dbe5150 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.046447] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c20f17e-b347-4378-b65a-b1e5aab1dc97 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.056256] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100838, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.056494] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 765.056494] env[63538]: value = "task-5100839" [ 765.056494] env[63538]: _type = "Task" [ 765.056494] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.074019] env[63538]: DEBUG nova.compute.provider_tree [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.075331] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100836, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525109} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.079081] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 46e2c1f4-edf7-45d6-ba77-c872005fcf1b/46e2c1f4-edf7-45d6-ba77-c872005fcf1b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 765.079325] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 765.079613] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100837, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177409} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.079825] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7f939a9-d997-4c06-a535-fd3b5f0faf5a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.081900] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.082228] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 765.082416] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 765.090571] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100839, 'name': CreateVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.094150] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 765.094150] env[63538]: value = "task-5100840" [ 765.094150] env[63538]: _type = "Task" [ 765.094150] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.105421] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100840, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.158699] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 765.159661] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6415a651-e673-452f-aaab-a6a472ea81bf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.171441] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 765.171441] env[63538]: value = "task-5100841" [ 765.171441] env[63538]: _type = "Task" [ 765.171441] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.180949] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100841, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.528670] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100838, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.569785] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100839, 'name': CreateVM_Task, 'duration_secs': 0.442695} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.569966] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 765.570838] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.571336] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.571771] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 765.572079] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-211f6d6f-3a2b-4da7-9c55-771407fc1632 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.580106] env[63538]: DEBUG nova.scheduler.client.report [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
765.586026] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 765.586026] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52121477-63b5-a000-c580-fd8d9b3f2128" [ 765.586026] env[63538]: _type = "Task" [ 765.586026] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.612440] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52121477-63b5-a000-c580-fd8d9b3f2128, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.623080] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100840, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123152} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.623080] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 765.623572] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c31bbe0-42ca-4e78-9df6-7053a6da3d46 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.657114] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 46e2c1f4-edf7-45d6-ba77-c872005fcf1b/46e2c1f4-edf7-45d6-ba77-c872005fcf1b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 765.657114] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0578c6a-db56-46f1-a113-0340b4395b9f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.685702] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100841, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.688829] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 765.688829] env[63538]: value = "task-5100842" [ 765.688829] env[63538]: _type = "Task" [ 765.688829] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.700636] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100842, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.029081] env[63538]: DEBUG oslo_vmware.api [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100838, 'name': PowerOnVM_Task, 'duration_secs': 0.840399} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.029081] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 766.029081] env[63538]: INFO nova.compute.manager [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Took 9.28 seconds to spawn the instance on the hypervisor. [ 766.029081] env[63538]: DEBUG nova.compute.manager [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 766.030754] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cefb8be-f100-4945-a731-40ca00adb999 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.087229] env[63538]: DEBUG oslo_concurrency.lockutils [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.426s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.090252] env[63538]: DEBUG oslo_concurrency.lockutils [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.662s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.090481] env[63538]: DEBUG nova.objects.instance [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63538) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 766.110377] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 
tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52121477-63b5-a000-c580-fd8d9b3f2128, 'name': SearchDatastore_Task, 'duration_secs': 0.026972} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.111276] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.111767] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 766.111897] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.112024] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.113448] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 766.113448] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-187b6f13-26df-4a78-ba80-cd627d7660d4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.127324] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 766.127324] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 766.128125] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b9dfe06-4ed1-48f4-9051-518376332957 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.135223] env[63538]: INFO nova.scheduler.client.report [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Deleted allocations for instance 6f29f063-ddb5-491a-a1a0-7c9ed65a1718 [ 766.142621] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 766.142621] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b52741-24f3-41ce-318a-3a4f2d308060" [ 766.142621] env[63538]: _type = "Task" [ 766.142621] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.155834] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b52741-24f3-41ce-318a-3a4f2d308060, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.190039] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100841, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.203287] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100842, 'name': ReconfigVM_Task, 'duration_secs': 0.477288} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.203511] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 46e2c1f4-edf7-45d6-ba77-c872005fcf1b/46e2c1f4-edf7-45d6-ba77-c872005fcf1b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 766.205883] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a790335-52c5-44ed-8302-17bbe846a273 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.214155] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 766.214155] env[63538]: value = "task-5100843" [ 766.214155] env[63538]: _type = "Task" [ 766.214155] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.228554] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100843, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.562489] env[63538]: INFO nova.compute.manager [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Took 42.09 seconds to build instance. [ 766.639496] env[63538]: DEBUG nova.virt.hardware [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 766.640967] env[63538]: DEBUG nova.virt.hardware [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 766.640967] env[63538]: DEBUG nova.virt.hardware [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 766.640967] env[63538]: DEBUG nova.virt.hardware [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 766.640967] env[63538]: DEBUG nova.virt.hardware [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 766.640967] env[63538]: DEBUG nova.virt.hardware [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 766.640967] env[63538]: DEBUG nova.virt.hardware [None req-028bb863-ca42-4b59-ab54-c6944ac1140a 
tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 766.640967] env[63538]: DEBUG nova.virt.hardware [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 766.641332] env[63538]: DEBUG nova.virt.hardware [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 766.641332] env[63538]: DEBUG nova.virt.hardware [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 766.641537] env[63538]: DEBUG nova.virt.hardware [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 766.645177] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e674df-7e35-4c9a-8229-9224c3a1b005 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.649725] env[63538]: DEBUG oslo_concurrency.lockutils [None req-df5b1b1e-c015-4342-9228-a3ff855ef9ed tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.950s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.655380] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 30.853s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.656738] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.656738] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 
tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.656738] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.659772] env[63538]: INFO nova.compute.manager [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Terminating instance [ 766.665290] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.665407] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquired lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.666949] env[63538]: DEBUG nova.network.neutron [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 766.672197] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20cd70c8-08f9-4838-b19d-9ec0375114a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.677912] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b52741-24f3-41ce-318a-3a4f2d308060, 'name': SearchDatastore_Task, 'duration_secs': 0.023592} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.678639] env[63538]: DEBUG nova.compute.utils [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Can not refresh info_cache because instance was not found {{(pid=63538) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 766.686652] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-148b33bb-6fd6-4c31-9182-6851470356da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.706658] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:f2:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e0047f2-712d-4e63-b423-df4605d54382', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 766.714247] env[63538]: DEBUG oslo.service.loopingcall [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 766.715399] env[63538]: DEBUG nova.network.neutron [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 766.721232] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 766.721798] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7132b61a-332c-4b43-b14e-9b14e2d0b26b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.747377] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100841, 'name': CloneVM_Task, 'duration_secs': 1.536381} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.747437] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 766.747437] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526201ba-40f8-28aa-f20e-d677bd7cf8af" [ 766.747437] env[63538]: _type = "Task" [ 766.747437] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.748437] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Created linked-clone VM from snapshot [ 766.749463] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77e88ce-6fde-4465-8559-9d5e8220eb36 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.761686] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100843, 'name': Rename_Task, 'duration_secs': 0.200807} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.761956] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 766.761956] env[63538]: value = "task-5100844" [ 766.761956] env[63538]: _type = "Task" [ 766.761956] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.762968] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 766.763615] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b986293f-17fb-486e-a8e5-1f97e5c3a3d4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.782840] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Uploading image f8066d3d-967f-413a-a58d-c42d62d64b1b {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 766.785540] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526201ba-40f8-28aa-f20e-d677bd7cf8af, 'name': SearchDatastore_Task, 'duration_secs': 0.020007} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.786734] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.786856] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf/c8a02fa6-5232-4dde-b6dd-0da1089b6bbf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 766.788102] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-912645a7-1bdf-4333-bb60-ede7ce288ffd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.794127] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100844, 'name': CreateVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.798093] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 766.798093] env[63538]: value = "task-5100845" [ 766.798093] env[63538]: _type = "Task" [ 766.798093] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.798718] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 766.798969] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-034da6b9-6ada-411a-9755-8ce027e0b6f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.805870] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 766.805870] env[63538]: value = "task-5100846" [ 766.805870] env[63538]: _type = "Task" [ 766.805870] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.814361] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100845, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.816802] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 766.816802] env[63538]: value = "task-5100847" [ 766.816802] env[63538]: _type = "Task" [ 766.816802] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.824053] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100846, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.831699] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100847, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.953176] env[63538]: DEBUG nova.network.neutron [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.063893] env[63538]: DEBUG oslo_concurrency.lockutils [None req-689f96cb-7b68-483d-8c8c-b68815aaba86 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.840s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.105044] env[63538]: DEBUG oslo_concurrency.lockutils [None req-82d40cab-79ec-4cfe-9fde-716f2903bdf5 tempest-ServersAdmin275Test-2017083506 tempest-ServersAdmin275Test-2017083506-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.106789] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.511s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.110893] env[63538]: DEBUG nova.objects.instance [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lazy-loading 'resources' on Instance uuid f9fa5578-acf3-416f-9cb0-8ceb00e5132d {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 767.223903] env[63538]: DEBUG nova.compute.manager [None req-8eb8da01-0b06-428a-afef-c4b60e872657 
tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 767.225129] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15cb2151-fb5a-4d8e-9b76-62109d04a2ae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.281215] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100844, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.319046] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100845, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.327993] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100846, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.334999] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100847, 'name': Destroy_Task, 'duration_secs': 0.48689} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.335290] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Destroyed the VM [ 767.335732] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 767.336389] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a3fef7ef-40f9-4c3a-8163-76b56ae577c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.346156] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 767.346156] env[63538]: value = "task-5100848" [ 767.346156] env[63538]: _type = "Task" [ 767.346156] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.358314] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100848, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.462513] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Releasing lock "refresh_cache-6f29f063-ddb5-491a-a1a0-7c9ed65a1718" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.462513] env[63538]: DEBUG nova.compute.manager [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 767.462513] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 767.462513] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d28293dd-7b5b-48ce-bf8d-b9badd80b5d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.483857] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c4b239-b569-4b49-988e-aacc2abf6277 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.526980] env[63538]: WARNING nova.virt.vmwareapi.vmops [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6f29f063-ddb5-491a-a1a0-7c9ed65a1718 could not be found. [ 767.527506] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 767.527506] env[63538]: INFO nova.compute.manager [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Took 0.07 seconds to destroy the instance on the hypervisor. 
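Annotation: the "Acquiring lock" / "Acquired lock" / "Releasing lock" and "acquired by ... :: waited" / "released by ... :: held" entries around this point come from oslo.concurrency's lockutils helpers, which Nova wraps around the image-cache, refresh_cache-<uuid> and compute_resources critical sections. A minimal sketch of that pattern follows; it is not Nova's actual code, and the lock names and function bodies are illustrative only.

# Sketch of the oslo.concurrency locking pattern reflected in the
# "Acquiring/Acquired/Releasing lock" and "waited/held" entries in this log.
# Not Nova's actual code; names and bodies are illustrative.
from oslo_concurrency import lockutils


def refresh_instance_cache(instance_uuid):
    # lockutils.lock() is a context manager; it emits the DEBUG
    # "Acquiring lock" / "Acquired lock" / "Releasing lock" messages
    # (the lockutils.py:310/313/331 call sites seen above).
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # ... rebuild the instance's network info cache ...


# The "Lock ... acquired by ... :: waited N.NNNs" / "released by ... :: held
# N.NNNs" variants (the lockutils.py:402/407/421 call sites) come from the
# synchronized() decorator form of the same helper.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # ... resource-tracker bookkeeping done under the lock ...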
[ 767.527855] env[63538]: DEBUG oslo.service.loopingcall [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.528234] env[63538]: DEBUG nova.compute.manager [-] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 767.528366] env[63538]: DEBUG nova.network.neutron [-] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 767.565120] env[63538]: DEBUG nova.network.neutron [-] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 767.566723] env[63538]: DEBUG nova.compute.manager [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 767.616829] env[63538]: DEBUG nova.objects.instance [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lazy-loading 'numa_topology' on Instance uuid f9fa5578-acf3-416f-9cb0-8ceb00e5132d {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 767.741325] env[63538]: INFO nova.compute.manager [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] instance snapshotting [ 767.748846] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74036ea-bbe7-4fc0-a147-e59e16d8ce86 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.774520] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a3b8c9-d0fa-4fdd-8600-6402accca39a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.781170] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100844, 'name': CreateVM_Task, 'duration_secs': 0.636006} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.784033] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 767.787059] env[63538]: DEBUG oslo_concurrency.lockutils [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.787300] env[63538]: DEBUG oslo_concurrency.lockutils [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.787704] env[63538]: DEBUG oslo_concurrency.lockutils [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 767.788317] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03f6655b-f96b-4d40-a798-296f3326fd3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.795022] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 767.795022] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527fe620-e558-5dec-c2c2-d7fb9cb2820d" [ 767.795022] env[63538]: _type = "Task" [ 767.795022] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.809368] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527fe620-e558-5dec-c2c2-d7fb9cb2820d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.816811] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100845, 'name': PowerOnVM_Task, 'duration_secs': 0.996332} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.817364] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 767.817725] env[63538]: INFO nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Took 7.80 seconds to spawn the instance on the hypervisor. [ 767.817866] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 767.818734] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6341a8c1-f268-4bea-8e64-819fa93e6b23 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.826165] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100846, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642354} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.826165] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf/c8a02fa6-5232-4dde-b6dd-0da1089b6bbf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 767.826506] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 767.826868] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54fa0cd2-feb9-4d75-aef1-6a62559dea58 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.840112] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 767.840112] env[63538]: value = "task-5100849" [ 767.840112] env[63538]: _type = "Task" [ 767.840112] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.855724] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100849, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.862825] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100848, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.070156] env[63538]: DEBUG nova.network.neutron [-] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.098925] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.123165] env[63538]: DEBUG nova.objects.base [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 768.291032] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 768.292635] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f7403b7f-e12e-438f-8f9c-8ed543097a89 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.301407] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 768.301407] env[63538]: value = "task-5100850" [ 768.301407] env[63538]: _type = "Task" [ 768.301407] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.310767] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527fe620-e558-5dec-c2c2-d7fb9cb2820d, 'name': SearchDatastore_Task, 'duration_secs': 0.013034} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.314561] env[63538]: DEBUG oslo_concurrency.lockutils [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.314892] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.315219] env[63538]: DEBUG oslo_concurrency.lockutils [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.315414] env[63538]: DEBUG oslo_concurrency.lockutils [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.315621] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 768.316507] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0eb91dc4-e409-4bc2-a613-09d0748471f0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.322085] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100850, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.334206] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.334206] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 768.341785] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6ce1d9d-09c4-4af2-ad23-e46815623e5c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.351449] env[63538]: INFO nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Took 42.55 seconds to build instance. [ 768.361261] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 768.361261] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cd5b00-2a90-4f61-1de7-94ac377f1d2a" [ 768.361261] env[63538]: _type = "Task" [ 768.361261] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.371758] env[63538]: DEBUG oslo_vmware.api [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100848, 'name': RemoveSnapshot_Task, 'duration_secs': 0.920345} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.372182] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100849, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073271} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.376131] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 768.379155] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 768.389609] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc54a8f4-2043-4a9d-b560-0ef1c59630df {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.393962] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cd5b00-2a90-4f61-1de7-94ac377f1d2a, 'name': SearchDatastore_Task, 'duration_secs': 0.01689} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.399666] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-772932bd-0025-49c8-aeb7-bcb75c8a2557 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.423391] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf/c8a02fa6-5232-4dde-b6dd-0da1089b6bbf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 768.428099] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5268de8-8ed6-41d1-8d88-3657fc8090c3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.448452] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 768.448452] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a96286-de6e-28d9-e2d2-d450f740d058" [ 768.448452] env[63538]: _type = "Task" [ 768.448452] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.456671] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 768.456671] env[63538]: value = "task-5100851" [ 768.456671] env[63538]: _type = "Task" [ 768.456671] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.468565] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a96286-de6e-28d9-e2d2-d450f740d058, 'name': SearchDatastore_Task, 'duration_secs': 0.019601} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.469395] env[63538]: DEBUG oslo_concurrency.lockutils [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.469742] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 707a79e2-f5db-479c-b719-1e040935cda3/707a79e2-f5db-479c-b719-1e040935cda3.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 768.470090] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36959e70-f75c-42f2-899b-181571142cce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.476830] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100851, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.483137] env[63538]: DEBUG nova.compute.manager [req-09ea08bd-5a48-44c5-9cc3-460f6a2b8dc9 req-ce82ad77-7850-49a7-8658-ffd05b37fa76 service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Received event network-changed-a679ee9b-3e51-4ce7-ab24-0792218d36ba {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 768.483375] env[63538]: DEBUG nova.compute.manager [req-09ea08bd-5a48-44c5-9cc3-460f6a2b8dc9 req-ce82ad77-7850-49a7-8658-ffd05b37fa76 service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Refreshing instance network info cache due to event network-changed-a679ee9b-3e51-4ce7-ab24-0792218d36ba. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 768.483688] env[63538]: DEBUG oslo_concurrency.lockutils [req-09ea08bd-5a48-44c5-9cc3-460f6a2b8dc9 req-ce82ad77-7850-49a7-8658-ffd05b37fa76 service nova] Acquiring lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.483873] env[63538]: DEBUG oslo_concurrency.lockutils [req-09ea08bd-5a48-44c5-9cc3-460f6a2b8dc9 req-ce82ad77-7850-49a7-8658-ffd05b37fa76 service nova] Acquired lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.484088] env[63538]: DEBUG nova.network.neutron [req-09ea08bd-5a48-44c5-9cc3-460f6a2b8dc9 req-ce82ad77-7850-49a7-8658-ffd05b37fa76 service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Refreshing network info cache for port a679ee9b-3e51-4ce7-ab24-0792218d36ba {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 768.488411] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 768.488411] env[63538]: value = "task-5100852" [ 768.488411] env[63538]: _type = "Task" [ 768.488411] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.499403] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100852, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.579953] env[63538]: INFO nova.compute.manager [-] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Took 1.05 seconds to deallocate network for instance. 
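The entries above follow the driver's usual vCenter task pattern: a task is submitted (CreateSnapshot_Task, ExtendVirtualDisk_Task, SearchDatastore_Task, and so on), a "Waiting for the task ... to complete" record is emitted, and the task is then polled until it finishes, which is what produces the recurring "progress is N%" records. A minimal poll loop of that shape, purely as an illustration and not the oslo.vmware implementation, could look like the sketch below; get_task_info is a hypothetical callable standing in for the vCenter task query.

import time

def wait_until_task_done(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
    # Illustrative sketch only. get_task_info(task_id) is assumed to return e.g.
    #   {'state': 'running', 'progress': 51} or {'state': 'success', 'result': ...}
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        state = info.get('state')
        if state == 'success':
            return info.get('result')
        if state == 'error':
            raise RuntimeError('task %s failed: %s' % (task_id, info.get('error')))
        # Corresponds to the recurring "Task: {...} progress is N%." records above.
        print('task %s progress is %s%%' % (task_id, info.get('progress', 0)))
        time.sleep(poll_interval)
    raise TimeoutError('task %s did not complete within %ss' % (task_id, timeout))
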
[ 768.766191] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df73086e-474d-4345-867d-0e3045e3e4a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.777131] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379957d3-3e56-4383-94b4-2bbcc2ffd5c4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.846494] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb5c4d3-49b6-4f1b-918d-43f55e7da7ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.856905] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "46e2c1f4-edf7-45d6-ba77-c872005fcf1b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.857s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.857253] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100850, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.863577] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ea095b-de5b-469b-ba84-300c74c55b6c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.888561] env[63538]: WARNING nova.compute.manager [None req-d584eaae-8b74-47ae-a3f8-2eccbfd86855 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Image not found during snapshot: nova.exception.ImageNotFound: Image f8066d3d-967f-413a-a58d-c42d62d64b1b could not be found. [ 768.890241] env[63538]: DEBUG nova.compute.provider_tree [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 768.971240] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100851, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.003316] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100852, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.072062] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "dbf48807-08a7-46d1-8454-42437a9f87c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.073362] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "dbf48807-08a7-46d1-8454-42437a9f87c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.074740] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "dbf48807-08a7-46d1-8454-42437a9f87c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.077396] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "dbf48807-08a7-46d1-8454-42437a9f87c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.077742] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "dbf48807-08a7-46d1-8454-42437a9f87c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.003s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.083860] env[63538]: INFO nova.compute.manager [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Terminating instance [ 769.086487] env[63538]: DEBUG nova.compute.manager [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 769.086766] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 769.087871] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3aa822a-7f4a-4a07-996c-3f4bef2b3a48 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.091479] env[63538]: INFO nova.compute.manager [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Instance disappeared during terminate [ 769.091791] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b37d1450-596a-4d18-9432-3b3266c2f4fb tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "6f29f063-ddb5-491a-a1a0-7c9ed65a1718" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.437s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.103872] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 769.104304] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c011c2ac-c045-4738-bc60-63740c0e0e5d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.115557] env[63538]: DEBUG oslo_vmware.api [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 769.115557] env[63538]: value = "task-5100853" [ 769.115557] env[63538]: _type = "Task" [ 769.115557] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.130591] env[63538]: DEBUG oslo_vmware.api [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100853, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.355018] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100850, 'name': CreateSnapshot_Task, 'duration_secs': 1.013963} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.358367] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 769.359323] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f4b88d-410a-4751-9a30-2ff2311c4447 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.369052] env[63538]: DEBUG nova.compute.manager [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 769.427255] env[63538]: ERROR nova.scheduler.client.report [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [req-7b99e989-69bc-4321-968a-8aead180f0c2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7b99e989-69bc-4321-968a-8aead180f0c2"}]} [ 769.453062] env[63538]: DEBUG nova.scheduler.client.report [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 769.474840] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100851, 'name': ReconfigVM_Task, 'duration_secs': 0.792196} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.476414] env[63538]: DEBUG nova.scheduler.client.report [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 769.476778] env[63538]: DEBUG nova.compute.provider_tree [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 769.480046] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Reconfigured VM instance instance-00000006 to attach disk [datastore2] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf/c8a02fa6-5232-4dde-b6dd-0da1089b6bbf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 769.481425] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8c2dfc9-4cbf-41f8-9114-fa6c24855d60 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.490713] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 769.490713] env[63538]: value = "task-5100854" [ 769.490713] env[63538]: _type = "Task" [ 769.490713] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.499592] env[63538]: DEBUG nova.scheduler.client.report [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 769.528219] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100854, 'name': Rename_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.533378] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100852, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.670896} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.535365] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 707a79e2-f5db-479c-b719-1e040935cda3/707a79e2-f5db-479c-b719-1e040935cda3.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 769.535365] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 769.535365] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a184fa86-4fc1-40e1-b497-27ea18dd1047 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.542704] env[63538]: DEBUG nova.scheduler.client.report [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 769.548371] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 769.548371] env[63538]: value = "task-5100855" [ 769.548371] env[63538]: _type = "Task" [ 769.548371] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.562650] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100855, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.575311] env[63538]: DEBUG nova.network.neutron [req-09ea08bd-5a48-44c5-9cc3-460f6a2b8dc9 req-ce82ad77-7850-49a7-8658-ffd05b37fa76 service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updated VIF entry in instance network info cache for port a679ee9b-3e51-4ce7-ab24-0792218d36ba. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 769.578128] env[63538]: DEBUG nova.network.neutron [req-09ea08bd-5a48-44c5-9cc3-460f6a2b8dc9 req-ce82ad77-7850-49a7-8658-ffd05b37fa76 service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance_info_cache with network_info: [{"id": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "address": "fa:16:3e:08:aa:47", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa679ee9b-3e", "ovs_interfaceid": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.639996] env[63538]: DEBUG oslo_vmware.api [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100853, 'name': PowerOffVM_Task, 'duration_secs': 0.2992} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.640454] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 769.640855] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 769.641148] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-907c8a71-ecb1-4517-8346-d692a5fee565 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.728177] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 769.728403] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 769.728589] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Deleting the datastore file [datastore1] dbf48807-08a7-46d1-8454-42437a9f87c0 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 769.729157] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c48d9b8-b511-43ff-91da-0c886ee4f9d0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.739019] env[63538]: DEBUG oslo_vmware.api [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for the task: (returnval){ [ 769.739019] env[63538]: value = "task-5100857" [ 769.739019] env[63538]: _type = "Task" [ 769.739019] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.754903] env[63538]: DEBUG oslo_vmware.api [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100857, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.901563] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 769.907611] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8dda2b2d-f176-4119-abd5-1d748f6820d8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.925569] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 769.925569] env[63538]: value = "task-5100858" [ 769.925569] env[63538]: _type = "Task" [ 769.925569] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.932754] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.941373] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100858, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.007028] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100854, 'name': Rename_Task, 'duration_secs': 0.351419} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.007338] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 770.008009] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69064448-c6a0-4388-8629-de2f4929707b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.016072] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 770.016072] env[63538]: value = "task-5100859" [ 770.016072] env[63538]: _type = "Task" [ 770.016072] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.029804] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100859, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.063396] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100855, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.176364} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.064044] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 770.068633] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42127b1-ce21-486e-8553-03802db27f34 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.079831] env[63538]: DEBUG oslo_concurrency.lockutils [req-09ea08bd-5a48-44c5-9cc3-460f6a2b8dc9 req-ce82ad77-7850-49a7-8658-ffd05b37fa76 service nova] Releasing lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.114965] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 707a79e2-f5db-479c-b719-1e040935cda3/707a79e2-f5db-479c-b719-1e040935cda3.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 770.119517] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab1f71f2-e0a2-4b70-9f5d-ccd3ab574875 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.158726] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 770.158726] env[63538]: value = "task-5100860" [ 770.158726] env[63538]: _type = "Task" [ 770.158726] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.167802] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100860, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.252286] env[63538]: DEBUG oslo_vmware.api [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Task: {'id': task-5100857, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.396896} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.255373] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 770.255596] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 770.256205] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 770.256205] env[63538]: INFO nova.compute.manager [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Took 1.17 seconds to destroy the instance on the hypervisor. [ 770.256335] env[63538]: DEBUG oslo.service.loopingcall [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 770.256821] env[63538]: DEBUG nova.compute.manager [-] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 770.256923] env[63538]: DEBUG nova.network.neutron [-] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 770.318608] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8827293e-c94d-4cfd-af80-27b51cd27acd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.330649] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21bdabf-b969-42eb-8c32-13db139d3951 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.384356] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49444a76-8d33-4ee3-bb63-e1cb1a58c93e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.396170] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162dcf16-278b-4b15-a6dd-ab7163763e24 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.421391] env[63538]: DEBUG nova.compute.provider_tree [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.442619] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100858, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.451388] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "db5993ce-6982-4b82-8f5d-3fe51df8896b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.452249] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "db5993ce-6982-4b82-8f5d-3fe51df8896b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.452613] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "db5993ce-6982-4b82-8f5d-3fe51df8896b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.452613] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "db5993ce-6982-4b82-8f5d-3fe51df8896b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.453054] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "db5993ce-6982-4b82-8f5d-3fe51df8896b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.456595] env[63538]: INFO nova.compute.manager [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Terminating instance [ 770.463439] env[63538]: DEBUG nova.compute.manager [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 770.463439] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 770.463793] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d492b132-855c-4f3b-8556-8adae48777ae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.477240] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 770.477658] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f494ec2e-d134-435a-ae9e-9bf9e3abbe1f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.489706] env[63538]: DEBUG oslo_vmware.api [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 770.489706] env[63538]: value = "task-5100861" [ 770.489706] env[63538]: _type = "Task" [ 770.489706] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.504236] env[63538]: DEBUG oslo_vmware.api [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100861, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.529489] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100859, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.671129] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100860, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.900304] env[63538]: DEBUG nova.compute.manager [req-7f33f17b-64f7-455c-90df-8a6dadd33d80 req-07d40e6a-ce01-4f2c-ac1f-dc68a01bab6c service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Received event network-vif-deleted-38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 770.900825] env[63538]: INFO nova.compute.manager [req-7f33f17b-64f7-455c-90df-8a6dadd33d80 req-07d40e6a-ce01-4f2c-ac1f-dc68a01bab6c service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Neutron deleted interface 38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e; detaching it from the instance and deleting it from the info cache [ 770.900934] env[63538]: DEBUG nova.network.neutron [req-7f33f17b-64f7-455c-90df-8a6dadd33d80 req-07d40e6a-ce01-4f2c-ac1f-dc68a01bab6c service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.926448] env[63538]: DEBUG nova.scheduler.client.report [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 770.947531] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100858, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.003496] env[63538]: DEBUG oslo_vmware.api [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100861, 'name': PowerOffVM_Task, 'duration_secs': 0.260681} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.007018] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 771.007018] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 771.007018] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7c9264a-6e58-4258-88ac-7586df48e38d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.028452] env[63538]: DEBUG oslo_vmware.api [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100859, 'name': PowerOnVM_Task, 'duration_secs': 0.890842} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.028730] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 771.028942] env[63538]: DEBUG nova.compute.manager [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 771.029845] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67acb98-5bf4-403c-9eda-9475ba722965 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.084686] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 771.084925] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 771.085132] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Deleting the datastore file [datastore1] db5993ce-6982-4b82-8f5d-3fe51df8896b {{(pid=63538) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 771.085426] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5914c60-7afe-41d5-9714-28ad76653980 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.098076] env[63538]: DEBUG oslo_vmware.api [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 771.098076] env[63538]: value = "task-5100863" [ 771.098076] env[63538]: _type = "Task" [ 771.098076] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.110532] env[63538]: DEBUG oslo_vmware.api [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100863, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.170408] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100860, 'name': ReconfigVM_Task, 'duration_secs': 0.526295} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.170706] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 707a79e2-f5db-479c-b719-1e040935cda3/707a79e2-f5db-479c-b719-1e040935cda3.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 771.171382] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4fd160de-d06e-4c38-8a2a-22174388a2e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.178741] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 771.178741] env[63538]: value = "task-5100864" [ 771.178741] env[63538]: _type = "Task" [ 771.178741] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.187972] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100864, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.251455] env[63538]: DEBUG nova.network.neutron [-] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.406757] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-22cacdd3-1354-46dc-a5ce-2a5bc9ca0638 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.419151] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56bef34-0993-4757-996a-f143cd7ff811 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.443360] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.337s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.445992] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100858, 'name': CloneVM_Task} progress is 95%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.448534] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.477s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.448902] env[63538]: DEBUG nova.objects.instance [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lazy-loading 'resources' on Instance uuid 65fc18ff-8901-40d2-8a5b-640eb9768240 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 771.470125] env[63538]: DEBUG nova.compute.manager [req-7f33f17b-64f7-455c-90df-8a6dadd33d80 req-07d40e6a-ce01-4f2c-ac1f-dc68a01bab6c service nova] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Detach interface failed, port_id=38f5393e-f3f1-4497-8d7d-c3aa98a1fc8e, reason: Instance dbf48807-08a7-46d1-8454-42437a9f87c0 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 771.553827] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.611365] env[63538]: DEBUG oslo_vmware.api [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5100863, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292986} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.612233] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 771.612233] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 771.612233] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 771.612233] env[63538]: INFO nova.compute.manager [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Took 1.15 seconds to destroy the instance on the hypervisor. [ 771.612491] env[63538]: DEBUG oslo.service.loopingcall [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 771.612788] env[63538]: DEBUG nova.compute.manager [-] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 771.612788] env[63538]: DEBUG nova.network.neutron [-] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 771.695737] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100864, 'name': Rename_Task, 'duration_secs': 0.24346} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.697109] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 771.697109] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d5d1f91-a2a1-4cd4-bd81-fb92cefe49a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.706088] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 771.706088] env[63538]: value = "task-5100865" [ 771.706088] env[63538]: _type = "Task" [ 771.706088] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.717013] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100865, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.754519] env[63538]: INFO nova.compute.manager [-] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Took 1.50 seconds to deallocate network for instance. [ 771.953831] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2131dcd3-d463-45a0-b420-f1cfe6ca4737 tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 54.177s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.956723] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100858, 'name': CloneVM_Task, 'duration_secs': 1.644043} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.960626] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 32.864s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.960927] env[63538]: INFO nova.compute.manager [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Unshelving [ 771.962824] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Created linked-clone VM from snapshot [ 771.964900] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c104eb-bfaf-4c4b-bfb4-f90c186d3a52 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.975689] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Uploading image d24e9e72-5908-4cc3-b92f-fe3db218cf40 {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 772.032887] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 772.032887] env[63538]: value = "vm-992372" [ 772.032887] env[63538]: _type = "VirtualMachine" [ 772.032887] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 772.035022] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9218a8df-7e09-475c-9566-04a9d033c99d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.044984] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lease: (returnval){ [ 772.044984] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c562d-1267-46e8-1a9f-80db687ecb49" [ 772.044984] env[63538]: _type = "HttpNfcLease" [ 772.044984] env[63538]: } obtained for exporting VM: (result){ [ 772.044984] env[63538]: value = "vm-992372" [ 772.044984] env[63538]: _type = "VirtualMachine" [ 772.044984] env[63538]: }. 
{{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 772.045412] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the lease: (returnval){ [ 772.045412] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c562d-1267-46e8-1a9f-80db687ecb49" [ 772.045412] env[63538]: _type = "HttpNfcLease" [ 772.045412] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 772.055376] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 772.055376] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c562d-1267-46e8-1a9f-80db687ecb49" [ 772.055376] env[63538]: _type = "HttpNfcLease" [ 772.055376] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 772.099966] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83620952-1af9-4026-a322-982b66163c97 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.109361] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891be2d9-8042-42b1-848b-86a4746d7489 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.157721] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af15c133-2ce7-45ea-9294-9f7452a8af75 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.168345] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6bfadfe-2457-4475-a76a-9c783f127c21 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.184680] env[63538]: DEBUG nova.compute.provider_tree [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.219208] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100865, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.223813] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Acquiring lock "0e718984-cfce-4620-9be6-fdcfb4954da8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.224163] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Lock "0e718984-cfce-4620-9be6-fdcfb4954da8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.265682] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.538548] env[63538]: DEBUG nova.network.neutron [-] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.555843] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 772.555843] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c562d-1267-46e8-1a9f-80db687ecb49" [ 772.555843] env[63538]: _type = "HttpNfcLease" [ 772.555843] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 772.555843] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 772.555843] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c562d-1267-46e8-1a9f-80db687ecb49" [ 772.555843] env[63538]: _type = "HttpNfcLease" [ 772.555843] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 772.555843] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282b7ef3-fb17-4e1f-9e2c-d9759bb71563 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.563956] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c525bc-983a-b0f6-f74e-ba949242e4fc/disk-0.vmdk from lease info. 
{{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 772.564191] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c525bc-983a-b0f6-f74e-ba949242e4fc/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 772.675857] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-10e21196-8aac-4477-95ff-5d8791b82ab9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.687849] env[63538]: DEBUG nova.scheduler.client.report [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 772.726956] env[63538]: DEBUG oslo_vmware.api [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100865, 'name': PowerOnVM_Task, 'duration_secs': 0.698094} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.727327] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 772.727511] env[63538]: DEBUG nova.compute.manager [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 772.728630] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e411ad8-42ab-445c-9616-e1c8ea3c2bbc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.914962] env[63538]: DEBUG oslo_vmware.rw_handles [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b9c76e-274a-0d4e-ed48-9f20a6a40ad7/disk-0.vmdk. 
{{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 772.917043] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acbf740c-c9ff-484c-8b5a-32e9144497f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.923895] env[63538]: DEBUG oslo_vmware.rw_handles [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b9c76e-274a-0d4e-ed48-9f20a6a40ad7/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 772.924206] env[63538]: ERROR oslo_vmware.rw_handles [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b9c76e-274a-0d4e-ed48-9f20a6a40ad7/disk-0.vmdk due to incomplete transfer. [ 772.924360] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-74b4c37c-6cbd-462f-95aa-a17c92de7fb9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.933325] env[63538]: DEBUG nova.compute.manager [req-10add3e5-ea6f-4a65-ae68-e44ccb14be1c req-9c2ae585-eb31-4c40-bb52-af8b8d6847d2 service nova] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Received event network-vif-deleted-1d37786b-c74d-41d0-a685-3082d8f007be {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 772.937382] env[63538]: DEBUG oslo_vmware.rw_handles [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b9c76e-274a-0d4e-ed48-9f20a6a40ad7/disk-0.vmdk. {{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 772.937563] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Uploaded image 84b51683-af18-40a9-9328-0e3969474fb9 to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 772.940113] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 772.940800] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e9bb6eca-a4c0-47fc-a861-ee65377bd95a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.948965] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 772.948965] env[63538]: value = "task-5100867" [ 772.948965] env[63538]: _type = "Task" [ 772.948965] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.958740] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100867, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.983377] env[63538]: INFO nova.compute.manager [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Rebuilding instance [ 773.005470] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.031019] env[63538]: DEBUG nova.compute.manager [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 773.031019] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a5c9aee-a9f4-48bb-9710-ded5e4cd3a96 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.041880] env[63538]: INFO nova.compute.manager [-] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Took 1.43 seconds to deallocate network for instance. 
[ 773.193100] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.745s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.195783] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.052s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.196255] env[63538]: DEBUG nova.objects.instance [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Lazy-loading 'resources' on Instance uuid fd650fdc-6b49-4051-8267-bbd1f0cb86f1 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 773.223263] env[63538]: INFO nova.scheduler.client.report [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Deleted allocations for instance 65fc18ff-8901-40d2-8a5b-640eb9768240 [ 773.252092] env[63538]: DEBUG oslo_concurrency.lockutils [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.459994] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100867, 'name': Destroy_Task} progress is 33%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.542359] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 773.542726] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79079eab-594c-42c7-8a6a-be06ae416707 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.551466] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.552297] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 773.552297] env[63538]: value = "task-5100868" [ 773.552297] env[63538]: _type = "Task" [ 773.552297] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.566303] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100868, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.736960] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d940eddf-3562-4d45-bbc7-41f522f2ed1e tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "65fc18ff-8901-40d2-8a5b-640eb9768240" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.797s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.949270] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "04dc612b-7987-405b-9716-95c4ff3535ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.949561] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "04dc612b-7987-405b-9716-95c4ff3535ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.949857] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "04dc612b-7987-405b-9716-95c4ff3535ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.950229] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "04dc612b-7987-405b-9716-95c4ff3535ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.950468] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "04dc612b-7987-405b-9716-95c4ff3535ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.953222] env[63538]: INFO nova.compute.manager [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Terminating instance [ 773.958635] env[63538]: DEBUG nova.compute.manager [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 773.959636] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 773.959985] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d990b1e8-1059-42d4-906e-878ec14f5e03 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.972432] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100867, 'name': Destroy_Task, 'duration_secs': 0.679833} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.975156] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Destroyed the VM [ 773.975592] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 773.976040] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 773.976429] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e0961e3c-1a2a-45a7-920d-d5a72a85e9ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.978411] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e86fa3e-f8b5-42e5-af95-afc231919e92 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.992157] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 773.992157] env[63538]: value = "task-5100870" [ 773.992157] env[63538]: _type = "Task" [ 773.992157] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.002975] env[63538]: DEBUG oslo_vmware.api [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100870, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.014900] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "707a79e2-f5db-479c-b719-1e040935cda3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.015260] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "707a79e2-f5db-479c-b719-1e040935cda3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.015597] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "707a79e2-f5db-479c-b719-1e040935cda3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.016046] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "707a79e2-f5db-479c-b719-1e040935cda3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.016151] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "707a79e2-f5db-479c-b719-1e040935cda3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.018838] env[63538]: INFO nova.compute.manager [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Terminating instance [ 774.023878] env[63538]: DEBUG nova.compute.manager [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 774.024299] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 774.025029] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9664b05c-89d2-4507-846d-340e1277db05 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.036441] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 774.039352] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3270dac-0601-40df-8257-e87e497f39ed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.048619] env[63538]: DEBUG oslo_vmware.api [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 774.048619] env[63538]: value = "task-5100871" [ 774.048619] env[63538]: _type = "Task" [ 774.048619] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.058209] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 774.058209] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 774.058209] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleting the datastore file [datastore1] 04dc612b-7987-405b-9716-95c4ff3535ec {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 774.062451] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f63f94a2-88ad-4a3d-aed1-25af28ffd4f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.072947] env[63538]: DEBUG oslo_vmware.api [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100871, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.081744] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100868, 'name': PowerOffVM_Task, 'duration_secs': 0.385226} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.083710] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 774.084118] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 774.084677] env[63538]: DEBUG oslo_vmware.api [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 774.084677] env[63538]: value = "task-5100872" [ 774.084677] env[63538]: _type = "Task" [ 774.084677] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.087263] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89352424-aa43-4bca-8993-c65cd4228067 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.105901] env[63538]: DEBUG oslo_vmware.api [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5100872, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.110105] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 774.112466] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d6f36e0-7334-4843-bfe3-8bb726e9b4df {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.204825] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 774.204825] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 774.206563] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleting the datastore file [datastore2] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 774.206563] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57d1e093-04c3-4489-b9fd-508b51f3a668 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.214035] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 774.214035] env[63538]: value = "task-5100874" [ 774.214035] env[63538]: _type = "Task" [ 774.214035] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.226689] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100874, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.375746] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd80c469-3b0b-46b3-8e05-7819f591b413 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.384210] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aece583c-cf6a-4891-ba75-e3e60c6ef6f8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.428484] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7e952c-6dc3-42e2-87f7-5b617f6336fa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.437859] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3da6eb-1eb0-4b17-9d6f-7c339ed5468b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.454052] env[63538]: DEBUG nova.compute.provider_tree [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.506194] env[63538]: DEBUG nova.compute.utils [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Cleaning up image 84b51683-af18-40a9-9328-0e3969474fb9 {{(pid=63538) delete_image /opt/stack/nova/nova/compute/utils.py:1322}} [ 774.559984] env[63538]: DEBUG oslo_vmware.api [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100871, 'name': PowerOffVM_Task, 'duration_secs': 0.272644} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.560368] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 774.560639] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 774.560898] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-feeb7f96-fa88-4452-8b93-5736359a6f02 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.601947] env[63538]: WARNING nova.virt.vmwareapi.vmops [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] In vmwareapi:vmops:_destroy_instance, exception while deleting the VM contents from the disk: oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore1] 04dc612b-7987-405b-9716-95c4ff3535ec [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Traceback (most recent call last): [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1114, in _destroy_instance [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] ds_util.file_delete(self._session, [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] File "/opt/stack/nova/nova/virt/vmwareapi/ds_util.py", line 219, in file_delete [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] session._wait_for_task(file_delete_task) [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] return self.wait_for_task(task_ref) [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] return evt.wait() [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] result = hub.switch() [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in 
switch [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] return self.greenlet.switch() [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] self.f(*self.args, **self.kw) [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] raise exceptions.translate_fault(task_info.error) [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore1] 04dc612b-7987-405b-9716-95c4ff3535ec [ 774.601947] env[63538]: ERROR nova.virt.vmwareapi.vmops [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] [ 774.602753] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 774.602753] env[63538]: INFO nova.compute.manager [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Took 0.64 seconds to destroy the instance on the hypervisor. [ 774.602753] env[63538]: DEBUG oslo.service.loopingcall [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 774.603381] env[63538]: DEBUG nova.compute.manager [-] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 774.603542] env[63538]: DEBUG nova.network.neutron [-] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 774.626542] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 774.626780] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 774.626978] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleting the datastore file [datastore2] 707a79e2-f5db-479c-b719-1e040935cda3 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 774.627289] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee800cbd-4e17-40ff-a4cf-0a8353de92bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.637722] env[63538]: DEBUG oslo_vmware.api [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 774.637722] env[63538]: value = "task-5100876" [ 774.637722] env[63538]: _type = "Task" [ 774.637722] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.647226] env[63538]: DEBUG oslo_vmware.api [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100876, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.694256] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "ade3cce6-5662-4199-96f4-398436f840d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.694508] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "ade3cce6-5662-4199-96f4-398436f840d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.725811] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100874, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.336038} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.726808] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 774.726808] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 774.726808] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 774.815183] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "9c1f7da8-59f6-45bc-8d5f-23c8ec760829" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.815458] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "9c1f7da8-59f6-45bc-8d5f-23c8ec760829" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.959182] env[63538]: DEBUG nova.scheduler.client.report [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 
tempest-ServersAdmin275Test-2090274693-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 775.006381] env[63538]: DEBUG nova.compute.manager [req-92cae3d9-f326-4d66-ba9f-a0f02af8b4ca req-fbaa5b26-a048-47f7-a2b7-0f3533e4031d service nova] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Received event network-vif-deleted-c2833403-d523-4069-b5a5-778e92138ff9 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 775.006716] env[63538]: INFO nova.compute.manager [req-92cae3d9-f326-4d66-ba9f-a0f02af8b4ca req-fbaa5b26-a048-47f7-a2b7-0f3533e4031d service nova] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Neutron deleted interface c2833403-d523-4069-b5a5-778e92138ff9; detaching it from the instance and deleting it from the info cache [ 775.006791] env[63538]: DEBUG nova.network.neutron [req-92cae3d9-f326-4d66-ba9f-a0f02af8b4ca req-fbaa5b26-a048-47f7-a2b7-0f3533e4031d service nova] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.150165] env[63538]: DEBUG oslo_vmware.api [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5100876, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235048} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.150457] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 775.150654] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 775.150839] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 775.151038] env[63538]: INFO nova.compute.manager [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 775.151404] env[63538]: DEBUG oslo.service.loopingcall [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 775.151737] env[63538]: DEBUG nova.compute.manager [-] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 775.151858] env[63538]: DEBUG nova.network.neutron [-] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 775.466393] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.270s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.469524] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.084s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.471098] env[63538]: INFO nova.compute.claims [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 775.474218] env[63538]: DEBUG nova.network.neutron [-] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.498548] env[63538]: INFO nova.scheduler.client.report [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Deleted allocations for instance fd650fdc-6b49-4051-8267-bbd1f0cb86f1 [ 775.509604] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2fceb06-1a30-4bc6-ab2f-cda306c85f56 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.525613] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c9fb51-54d3-4bbf-8abf-484ca71237e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.570616] env[63538]: DEBUG nova.compute.manager [req-92cae3d9-f326-4d66-ba9f-a0f02af8b4ca req-fbaa5b26-a048-47f7-a2b7-0f3533e4031d service nova] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Detach interface failed, port_id=c2833403-d523-4069-b5a5-778e92138ff9, reason: Instance 04dc612b-7987-405b-9716-95c4ff3535ec could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 775.768127] env[63538]: DEBUG nova.virt.hardware [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 775.768127] env[63538]: DEBUG nova.virt.hardware [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 775.768606] env[63538]: DEBUG nova.virt.hardware [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 775.768981] env[63538]: DEBUG nova.virt.hardware [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 775.769306] env[63538]: DEBUG nova.virt.hardware [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 775.769669] env[63538]: DEBUG nova.virt.hardware [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 775.770050] env[63538]: DEBUG nova.virt.hardware [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 775.770365] env[63538]: DEBUG nova.virt.hardware [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 775.770680] env[63538]: DEBUG nova.virt.hardware [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b 
tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 775.771044] env[63538]: DEBUG nova.virt.hardware [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 775.771410] env[63538]: DEBUG nova.virt.hardware [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 775.772518] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db20762d-9f80-46e4-b51d-650e33bcaa1b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.783786] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfd5437-5e39-47c7-a3da-f2e1eeee9856 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.802457] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:60:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6edb8eae-1113-49d0-84f7-9fd9f82b26fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d80ee33-5e67-4651-a9b1-1f58ca92fb2e', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 775.810356] env[63538]: DEBUG oslo.service.loopingcall [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 775.810356] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 775.810356] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9a2c86a-1348-470a-913f-5347bf8dd456 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.832765] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 775.832765] env[63538]: value = "task-5100877" [ 775.832765] env[63538]: _type = "Task" [ 775.832765] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.842686] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100877, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.900630] env[63538]: DEBUG nova.network.neutron [-] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.978498] env[63538]: INFO nova.compute.manager [-] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Took 1.37 seconds to deallocate network for instance. [ 776.009631] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1743f5b9-23a7-474d-8768-3ad4ee9e8a62 tempest-ServersAdmin275Test-2090274693 tempest-ServersAdmin275Test-2090274693-project-member] Lock "fd650fdc-6b49-4051-8267-bbd1f0cb86f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.651s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.042800] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.345925] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100877, 'name': CreateVM_Task, 'duration_secs': 0.423372} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.346193] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 776.346924] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.347110] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.347488] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 776.347770] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3bc3e19-e5ec-40c0-960c-6f3462b95ed8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.353784] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 776.353784] env[63538]: 
value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5286c3ec-726f-e463-832b-4ca28a25cc26" [ 776.353784] env[63538]: _type = "Task" [ 776.353784] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.364076] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5286c3ec-726f-e463-832b-4ca28a25cc26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.404106] env[63538]: INFO nova.compute.manager [-] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Took 1.25 seconds to deallocate network for instance. [ 776.488037] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.873721] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5286c3ec-726f-e463-832b-4ca28a25cc26, 'name': SearchDatastore_Task, 'duration_secs': 0.016436} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.874530] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.874530] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 776.874818] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.875010] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.875402] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 
tempest-ServersAdminTestJSON-5453135-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 776.875520] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15aed41d-6c7f-4a77-a911-ea6550c5594c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.888269] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 776.889190] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 776.890113] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09576be6-cd82-49d6-94b6-8bd10cc3e6c5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.896950] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 776.896950] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524eac7d-c12b-ba45-780c-e61bc0691d7c" [ 776.896950] env[63538]: _type = "Task" [ 776.896950] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.916267] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.917416] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524eac7d-c12b-ba45-780c-e61bc0691d7c, 'name': SearchDatastore_Task, 'duration_secs': 0.01142} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.918280] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9637dc20-c52a-4f7c-8f23-02247fd04c0d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.930314] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 776.930314] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d30da1-43db-ec29-f017-c5912c2460a8" [ 776.930314] env[63538]: _type = "Task" [ 776.930314] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.942313] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d30da1-43db-ec29-f017-c5912c2460a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.020633] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef18ce2d-f7bc-4779-8b06-e2c6118efbe0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.029269] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab22218-9f83-4763-8891-93cb524c7bfb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.036107] env[63538]: DEBUG nova.compute.manager [req-faa26d31-1f7f-4aac-91b5-1dcded2385e2 req-e259374e-35c5-4667-bfd2-9c058ff2c59e service nova] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Received event network-vif-deleted-2e0047f2-712d-4e63-b423-df4605d54382 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 777.063960] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293b7bd5-bbcd-4485-8e6c-306b036ee06e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.073381] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537bc153-f0c5-40c0-8289-ef5c263243a1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.088426] env[63538]: DEBUG nova.compute.provider_tree [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.444964] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d30da1-43db-ec29-f017-c5912c2460a8, 'name': SearchDatastore_Task, 'duration_secs': 0.010328} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.445466] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.445888] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf/c8a02fa6-5232-4dde-b6dd-0da1089b6bbf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 777.446318] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2d2e1ea-78b2-4895-8bb1-4dac1787f494 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.456571] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 777.456571] env[63538]: value = "task-5100878" [ 777.456571] env[63538]: _type = "Task" [ 777.456571] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.467808] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100878, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.591999] env[63538]: DEBUG nova.scheduler.client.report [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 777.968811] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100878, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.097286] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.628s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.097901] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 778.105028] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.873s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.105028] env[63538]: DEBUG nova.objects.instance [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lazy-loading 'resources' on Instance uuid 5421e135-9581-4f81-aa8a-2a604887a1df {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 778.467762] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100878, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722513} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.468745] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf/c8a02fa6-5232-4dde-b6dd-0da1089b6bbf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 778.469031] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 778.469320] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb4dfb30-d5fe-4a98-b25e-72c78049fc1c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.479899] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 778.479899] env[63538]: value = "task-5100879" [ 778.479899] env[63538]: _type = "Task" [ 778.479899] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.489222] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100879, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.606262] env[63538]: DEBUG nova.compute.utils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 778.610934] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 778.620314] env[63538]: DEBUG nova.network.neutron [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 778.917752] env[63538]: DEBUG nova.policy [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '577a52928adf4587b963772b31a378cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d463d24e41b421eb7cb9d51ad207495', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 778.992950] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100879, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072437} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.993292] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 778.997639] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca83f59-d4bf-416d-95a6-4fb0739c90b6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.025188] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf/c8a02fa6-5232-4dde-b6dd-0da1089b6bbf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 779.028379] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a51ca03d-5d54-4893-bc4b-4093fb566328 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.054515] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 779.054515] env[63538]: value = "task-5100880" [ 779.054515] env[63538]: _type = "Task" [ 779.054515] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.070989] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100880, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.112444] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 779.232560] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cfc575-a700-47c7-bee8-31de5aae2402 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.241566] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f00227d-7dde-41d6-978f-37089ebad022 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.273590] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd23e3d-23d4-49db-ae31-4d06c130fc56 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.285095] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869c6bb1-7ec1-403c-8f3b-b5b975a8e3d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.305222] env[63538]: DEBUG nova.compute.provider_tree [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.568720] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100880, 'name': ReconfigVM_Task, 'duration_secs': 0.331849} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.568973] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Reconfigured VM instance instance-00000006 to attach disk [datastore1] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf/c8a02fa6-5232-4dde-b6dd-0da1089b6bbf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 779.572414] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bff2d5d1-8220-4a15-b0fb-6cb26ba5f866 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.580708] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 779.580708] env[63538]: value = "task-5100881" [ 779.580708] env[63538]: _type = "Task" [ 779.580708] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.596804] env[63538]: DEBUG nova.network.neutron [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Successfully created port: 07400758-b7cb-4bba-9927-f1576e625a45 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 779.598952] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100881, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.809796] env[63538]: DEBUG nova.scheduler.client.report [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 780.099306] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100881, 'name': Rename_Task, 'duration_secs': 0.267714} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.103893] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 780.103893] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-046273c0-7f25-4fb3-b312-cbe2f4e28e51 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.113445] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 780.113445] env[63538]: value = "task-5100882" [ 780.113445] env[63538]: _type = "Task" [ 780.113445] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.127671] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 780.130167] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100882, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.167118] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 780.167444] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 780.167605] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 780.167806] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 780.167959] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 780.168147] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 780.168366] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 780.168544] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 780.169029] env[63538]: DEBUG 
nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 780.169029] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 780.169143] env[63538]: DEBUG nova.virt.hardware [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 780.170033] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd30f11-d300-485d-9cfe-05c4e1c6382e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.180125] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f524f0dd-8694-4c81-8571-cb4771886c96 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.325109] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.223s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.328435] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.306s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.331839] env[63538]: INFO nova.compute.claims [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.369117] env[63538]: INFO nova.scheduler.client.report [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Deleted allocations for instance 5421e135-9581-4f81-aa8a-2a604887a1df [ 780.623482] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100882, 'name': PowerOnVM_Task} progress is 88%. 
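The topology entries above show that with no flavor or image constraints the per-dimension limits default to 65536, so a 1-vCPU guest collapses to the single topology of 1 socket, 1 core, 1 thread. A hedged sketch of enumerating sockets*cores*threads factorizations under such limits follows; it is a simplification for illustration, not nova.virt.hardware's actual code.

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product equals vcpus
    # and which respect the per-dimension limits.
    topologies = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if sockets * cores * threads != vcpus:
            continue
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topologies.append((sockets, cores, threads))
    return topologies

# For the 1-vCPU m1.nano flavor above, only (1, 1, 1) survives:
print(possible_topologies(1))   # [(1, 1, 1)]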
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.881817] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9092b2cc-f2d5-4db0-b5b5-6fde5507fb22 tempest-ListServersNegativeTestJSON-685275572 tempest-ListServersNegativeTestJSON-685275572-project-member] Lock "5421e135-9581-4f81-aa8a-2a604887a1df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.522s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.945484] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c525bc-983a-b0f6-f74e-ba949242e4fc/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 780.946575] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fd6da3-bb23-40de-b21d-8932d2ac31d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.955621] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c525bc-983a-b0f6-f74e-ba949242e4fc/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 780.955761] env[63538]: ERROR oslo_vmware.rw_handles [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c525bc-983a-b0f6-f74e-ba949242e4fc/disk-0.vmdk due to incomplete transfer. [ 780.956101] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e82e820e-ddbc-4f8c-80a8-633e229f408c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.964787] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c525bc-983a-b0f6-f74e-ba949242e4fc/disk-0.vmdk. 
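The rw_handles entries above show the close path for an NFC export lease: the handle checks the lease state and, because the read stream was not fully consumed, aborts the lease instead of completing it before closing (the image upload itself still lands in Glance in the following entries). A rough sketch of that decision, with a hypothetical Lease class rather than the oslo.vmware one:

class Lease:
    # Hypothetical stand-in for an export lease in the "ready" state.
    def __init__(self, url):
        self.url = url
        self.state = "ready"

    def complete(self):
        self.state = "done"

    def abort(self):
        self.state = "aborted"

def close_read_handle(lease, bytes_read, expected_bytes):
    if lease.state != "ready":
        return
    if bytes_read >= expected_bytes:
        lease.complete()
    else:
        # Mirrors "Aborting lease ... due to incomplete transfer."
        print(f"Aborting lease for {lease.url} due to incomplete transfer.")
        lease.abort()

lease = Lease("https://example.test/nfc/disk-0.vmdk")
close_read_handle(lease, bytes_read=1024, expected_bytes=21318656)
print(lease.state)   # aborted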
{{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 780.965061] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Uploaded image d24e9e72-5908-4cc3-b92f-fe3db218cf40 to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 780.966952] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 780.967286] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-833bc7a9-d39a-40cc-9758-786db1b19992 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.976112] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 780.976112] env[63538]: value = "task-5100883" [ 780.976112] env[63538]: _type = "Task" [ 780.976112] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.986834] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100883, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.126008] env[63538]: DEBUG oslo_vmware.api [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100882, 'name': PowerOnVM_Task, 'duration_secs': 0.642235} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.126533] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 781.126615] env[63538]: DEBUG nova.compute.manager [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 781.127673] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec61df6c-01a2-4e13-99c0-8faf94ebf80f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.489168] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100883, 'name': Destroy_Task, 'duration_secs': 0.362581} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.489502] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Destroyed the VM [ 781.489741] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 781.490001] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-19d65c76-5360-4b07-8200-a8380eef4871 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.501022] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 781.501022] env[63538]: value = "task-5100884" [ 781.501022] env[63538]: _type = "Task" [ 781.501022] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.510876] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100884, 'name': RemoveSnapshot_Task} progress is 0%. 
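The entries above trace the tail of the snapshot/upload flow: the stream-optimized upload reaches Glance, the temporary export VM is destroyed (task-5100883), and the source snapshot is removed (task-5100884). A compact sketch of that ordering, using a hypothetical FakeSession in place of the real vSphere session:

class FakeSession:
    # Print-only stand-in for submitting and waiting on vSphere tasks.
    def call(self, method, ref):
        print(f"Invoking {method} on {ref}")
        return f"task-for-{method}"

    def wait_for_task(self, task):
        print(f"Task {task} completed successfully.")

def finish_snapshot(session, export_vm_ref, snapshot_ref, image_id):
    # Upload already streamed to Glance ("Uploaded image ... to the Glance
    # image server"), so only cleanup remains.
    session.wait_for_task(session.call("Destroy_Task", export_vm_ref))
    session.wait_for_task(session.call("RemoveSnapshot_Task", snapshot_ref))
    return image_id

finish_snapshot(FakeSession(), "export-vm", "snapshot-1",
                "d24e9e72-5908-4cc3-b92f-fe3db218cf40")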
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.607491] env[63538]: DEBUG nova.network.neutron [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Successfully updated port: 07400758-b7cb-4bba-9927-f1576e625a45 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 781.659468] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.691354] env[63538]: DEBUG nova.compute.manager [req-edfa7188-dda7-4a53-a014-4c4791f8a140 req-3163f98d-aec8-4f06-96ec-4cc8861632e0 service nova] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Received event network-vif-plugged-07400758-b7cb-4bba-9927-f1576e625a45 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 781.691652] env[63538]: DEBUG oslo_concurrency.lockutils [req-edfa7188-dda7-4a53-a014-4c4791f8a140 req-3163f98d-aec8-4f06-96ec-4cc8861632e0 service nova] Acquiring lock "e4b94aa7-7434-4a6e-b6d3-ed02315c435f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.691921] env[63538]: DEBUG oslo_concurrency.lockutils [req-edfa7188-dda7-4a53-a014-4c4791f8a140 req-3163f98d-aec8-4f06-96ec-4cc8861632e0 service nova] Lock "e4b94aa7-7434-4a6e-b6d3-ed02315c435f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.692783] env[63538]: DEBUG oslo_concurrency.lockutils [req-edfa7188-dda7-4a53-a014-4c4791f8a140 req-3163f98d-aec8-4f06-96ec-4cc8861632e0 service nova] Lock "e4b94aa7-7434-4a6e-b6d3-ed02315c435f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.693032] env[63538]: DEBUG nova.compute.manager [req-edfa7188-dda7-4a53-a014-4c4791f8a140 req-3163f98d-aec8-4f06-96ec-4cc8861632e0 service nova] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] No waiting events found dispatching network-vif-plugged-07400758-b7cb-4bba-9927-f1576e625a45 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 781.693284] env[63538]: WARNING nova.compute.manager [req-edfa7188-dda7-4a53-a014-4c4791f8a140 req-3163f98d-aec8-4f06-96ec-4cc8861632e0 service nova] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Received unexpected event network-vif-plugged-07400758-b7cb-4bba-9927-f1576e625a45 for instance with vm_state building and task_state spawning. 
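The network-vif-plugged entries above show the per-instance event registry at work: Neutron's notification arrives while the instance is still building, no waiter has been registered yet, so the pop finds nothing and the event is logged as unexpected. A minimal illustration of that registry follows; it is not Nova's InstanceEvents implementation.

import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}        # (instance, event_name) -> threading.Event

    def prepare(self, instance, event_name):
        # Register intent to wait for an event before triggering it.
        with self._lock:
            waiter = threading.Event()
            self._waiters[(instance, event_name)] = waiter
            return waiter

    def pop(self, instance, event_name):
        # Called when an external event arrives; None means nobody is waiting.
        with self._lock:
            return self._waiters.pop((instance, event_name), None)

events = InstanceEvents()
waiter = events.pop("e4b94aa7", "network-vif-plugged")
if waiter is None:
    # Matches "No waiting events found ... Received unexpected event ..."
    print("Received unexpected event network-vif-plugged for instance e4b94aa7")
else:
    waiter.set()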
[ 781.898322] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4af08b1-0459-459d-b164-275ea84693df {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.909453] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37d0990-f1c6-4a73-bdac-6b2cfcfdf92a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.947174] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866330a6-9e73-4c58-ade4-7e63f2d95674 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.956921] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f43feb5-1099-4b2f-b651-c11eaf33c4a4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.976915] env[63538]: DEBUG nova.compute.provider_tree [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.013092] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100884, 'name': RemoveSnapshot_Task} progress is 15%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.113244] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "refresh_cache-e4b94aa7-7434-4a6e-b6d3-ed02315c435f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.113385] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired lock "refresh_cache-e4b94aa7-7434-4a6e-b6d3-ed02315c435f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.113542] env[63538]: DEBUG nova.network.neutron [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 782.481843] env[63538]: DEBUG nova.scheduler.client.report [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 
'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 782.518194] env[63538]: DEBUG oslo_vmware.api [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100884, 'name': RemoveSnapshot_Task, 'duration_secs': 0.886466} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.518194] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 782.518194] env[63538]: INFO nova.compute.manager [None req-8eb8da01-0b06-428a-afef-c4b60e872657 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Took 14.77 seconds to snapshot the instance on the hypervisor. [ 782.688798] env[63538]: DEBUG nova.network.neutron [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 782.868022] env[63538]: DEBUG nova.network.neutron [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Updating instance_info_cache with network_info: [{"id": "07400758-b7cb-4bba-9927-f1576e625a45", "address": "fa:16:3e:6c:e3:05", "network": {"id": "106e253d-ba43-4eb6-a423-ed6a6d4156aa", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-330603874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d463d24e41b421eb7cb9d51ad207495", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07400758-b7", "ovs_interfaceid": "07400758-b7cb-4bba-9927-f1576e625a45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.990162] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.661s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.990509] env[63538]: DEBUG nova.compute.manager [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 782.994471] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.838s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.994707] env[63538]: DEBUG nova.objects.instance [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Lazy-loading 'resources' on Instance uuid 080b11d7-a756-45a0-81d5-b5fcc2662ac9 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 783.370560] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Releasing lock "refresh_cache-e4b94aa7-7434-4a6e-b6d3-ed02315c435f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.370992] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Instance network_info: |[{"id": "07400758-b7cb-4bba-9927-f1576e625a45", "address": "fa:16:3e:6c:e3:05", "network": {"id": "106e253d-ba43-4eb6-a423-ed6a6d4156aa", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-330603874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d463d24e41b421eb7cb9d51ad207495", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07400758-b7", "ovs_interfaceid": "07400758-b7cb-4bba-9927-f1576e625a45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 783.371476] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:e3:05', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': 'bec903a9-d773-4d7c-a80c-c2533be346fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07400758-b7cb-4bba-9927-f1576e625a45', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 783.380715] env[63538]: DEBUG oslo.service.loopingcall [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 783.380993] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 783.381701] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62c0f825-f3e6-4136-90f3-6b80877c000a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.408950] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 783.408950] env[63538]: value = "task-5100885" [ 783.408950] env[63538]: _type = "Task" [ 783.408950] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.418486] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100885, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.496722] env[63538]: DEBUG nova.compute.utils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 783.501075] env[63538]: DEBUG nova.compute.manager [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Allocating IP information in the background. 
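The VIF info above is derived from the cached Neutron port: the bridge becomes the network name, and the NSX logical switch id becomes an OpaqueNetwork reference consumed by the vmwareapi driver. A hedged sketch of that mapping; vif_info_from_port is illustrative only, not a Nova helper.

def vif_info_from_port(port):
    # Map a (simplified) Neutron port entry to the VIF info dict shape
    # shown in the log above.
    return {
        "network_name": port["network"]["bridge"],            # e.g. br-int
        "mac_address": port["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": port["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": port["id"],
        "vif_model": "vmxnet3",
    }

port = {
    "id": "07400758-b7cb-4bba-9927-f1576e625a45",
    "address": "fa:16:3e:6c:e3:05",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb"},
}
print(vif_info_from_port(port))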
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 783.501463] env[63538]: DEBUG nova.network.neutron [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 783.625409] env[63538]: DEBUG nova.policy [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c4dcefd65de48a582ffb683637bda94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dc18da1ea704eeaaeb62633c4f76ee8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 783.924913] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100885, 'name': CreateVM_Task, 'duration_secs': 0.390213} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.925067] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 783.925801] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.926307] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.926397] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 783.926615] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf529839-52cf-42af-9278-06f0c29b7fab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.932984] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 783.932984] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cdfd2d-012b-c1fd-571e-e3a07e47fb39" [ 783.932984] env[63538]: _type = "Task" [ 783.932984] env[63538]: } 
to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.946163] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "e50e95c0-830b-4d71-999b-546b138bf8f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.946413] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "e50e95c0-830b-4d71-999b-546b138bf8f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.946622] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "e50e95c0-830b-4d71-999b-546b138bf8f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.946812] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "e50e95c0-830b-4d71-999b-546b138bf8f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.946988] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "e50e95c0-830b-4d71-999b-546b138bf8f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.948700] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cdfd2d-012b-c1fd-571e-e3a07e47fb39, 'name': SearchDatastore_Task, 'duration_secs': 0.01027} completed successfully. 
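The devstack-image-cache_base entries above follow the image-cache pattern: take a lock named after the cached image, run SearchDatastore_Task to see whether the cached VMDK already exists, and only fetch the image from Glance when it does not. A stdlib-only sketch of that check; Nova uses oslo_concurrency.lockutils and datastore paths rather than local files, and the names below are illustrative.

import os, tempfile, threading
from collections import defaultdict

_image_locks = defaultdict(threading.Lock)     # stand-in for the named image-cache lock

def ensure_cached_image(cache_dir, image_id, fetch_image):
    cached_vmdk = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
    with _image_locks[image_id]:
        # Comparable to SearchDatastore_Task against devstack-image-cache_base.
        if not os.path.exists(cached_vmdk):
            os.makedirs(os.path.dirname(cached_vmdk), exist_ok=True)
            fetch_image(image_id, cached_vmdk)   # cache miss: download once
    return cached_vmdk

cache_root = os.path.join(tempfile.mkdtemp(), "devstack-image-cache_base")
print(ensure_cached_image(cache_root, "faabbca4-e27b-433a-b93d-f059fd73bc92",
                          lambda image_id, dst: open(dst, "wb").close()))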
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.949299] env[63538]: INFO nova.compute.manager [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Terminating instance [ 783.950729] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.950960] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 783.951198] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.951352] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.951528] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 783.952184] env[63538]: DEBUG nova.compute.manager [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 783.952433] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 783.952611] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a8657c3-808a-4b6a-b158-c028c4931219 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.956012] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a2ff17-04d8-4463-952e-570e16957992 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.968652] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 783.970390] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a1436ba-3d1f-43e7-bd45-ed62d4970b89 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.972250] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 783.972436] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 783.975733] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa502081-db1e-4b3d-901a-b8d8a2ef5a45 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.987650] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 783.987650] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524dbbe9-be86-4135-625d-57d774fcec2f" [ 783.987650] env[63538]: _type = "Task" [ 783.987650] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.990474] env[63538]: DEBUG oslo_vmware.api [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 783.990474] env[63538]: value = "task-5100886" [ 783.990474] env[63538]: _type = "Task" [ 783.990474] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.997793] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524dbbe9-be86-4135-625d-57d774fcec2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.009959] env[63538]: DEBUG nova.compute.manager [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 784.010878] env[63538]: DEBUG oslo_vmware.api [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100886, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.115648] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fac030-212d-4e40-b190-a9c895859aeb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.130149] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0f41c3-5f86-49b5-b3f8-11f2d9dff687 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.170030] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26932a64-26da-4a2c-8eca-eab27c355cab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.179048] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7d338b-152c-48c6-b6b1-37ee200088d0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.195809] env[63538]: DEBUG nova.compute.provider_tree [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.234976] env[63538]: DEBUG nova.compute.manager [req-6c0a05a8-b37d-4b30-9616-6c9d53f406dd req-65541cf3-349b-44f7-bc33-b414f13debbc service nova] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Received event network-changed-07400758-b7cb-4bba-9927-f1576e625a45 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 784.235222] env[63538]: DEBUG nova.compute.manager [req-6c0a05a8-b37d-4b30-9616-6c9d53f406dd req-65541cf3-349b-44f7-bc33-b414f13debbc service nova] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Refreshing instance network info cache due to event network-changed-07400758-b7cb-4bba-9927-f1576e625a45. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 784.235436] env[63538]: DEBUG oslo_concurrency.lockutils [req-6c0a05a8-b37d-4b30-9616-6c9d53f406dd req-65541cf3-349b-44f7-bc33-b414f13debbc service nova] Acquiring lock "refresh_cache-e4b94aa7-7434-4a6e-b6d3-ed02315c435f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.235584] env[63538]: DEBUG oslo_concurrency.lockutils [req-6c0a05a8-b37d-4b30-9616-6c9d53f406dd req-65541cf3-349b-44f7-bc33-b414f13debbc service nova] Acquired lock "refresh_cache-e4b94aa7-7434-4a6e-b6d3-ed02315c435f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.236165] env[63538]: DEBUG nova.network.neutron [req-6c0a05a8-b37d-4b30-9616-6c9d53f406dd req-65541cf3-349b-44f7-bc33-b414f13debbc service nova] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Refreshing network info cache for port 07400758-b7cb-4bba-9927-f1576e625a45 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 784.270109] env[63538]: DEBUG nova.network.neutron [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Successfully created port: 11d7dbc5-d269-456b-9a7a-601759e64b51 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 784.495326] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524dbbe9-be86-4135-625d-57d774fcec2f, 'name': SearchDatastore_Task, 'duration_secs': 0.01325} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.499347] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13d1d2ce-9959-40de-b355-c827018abdc2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.508270] env[63538]: DEBUG oslo_vmware.api [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100886, 'name': PowerOffVM_Task, 'duration_secs': 0.193974} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.508576] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 784.508576] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5281c5ce-da23-6471-1fd6-4407a1aea745" [ 784.508576] env[63538]: _type = "Task" [ 784.508576] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.509171] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 784.509358] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 784.509656] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b61d5a40-6ac0-4b5c-a678-6c0ee232426b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.523509] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5281c5ce-da23-6471-1fd6-4407a1aea745, 'name': SearchDatastore_Task, 'duration_secs': 0.009569} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.523808] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 784.524081] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] e4b94aa7-7434-4a6e-b6d3-ed02315c435f/e4b94aa7-7434-4a6e-b6d3-ed02315c435f.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 784.524363] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db5a70a8-479f-442a-bbac-28a0d2561974 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.531572] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 784.531572] env[63538]: value = "task-5100888" [ 784.531572] env[63538]: _type = "Task" [ 784.531572] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.542071] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100888, 'name': CopyVirtualDisk_Task} progress is 0%. 
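With the cache hit confirmed, spawning e4b94aa7 reduces to a datastore-local CopyVirtualDisk_Task from the cached image path to a folder named after the instance UUID. A small sketch of how those two datastore paths are assembled; build_copy_paths is illustrative, not a Nova helper.

def build_copy_paths(datastore, cache_folder, image_id, instance_uuid):
    # Source: cached base image; destination: per-instance folder and disk.
    src = f"[{datastore}] {cache_folder}/{image_id}/{image_id}.vmdk"
    dst = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    return src, dst

src, dst = build_copy_paths("datastore1", "devstack-image-cache_base",
                            "faabbca4-e27b-433a-b93d-f059fd73bc92",
                            "e4b94aa7-7434-4a6e-b6d3-ed02315c435f")
print(f"Copying Virtual Disk {src} to {dst}")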
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.587578] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 784.587758] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 784.587946] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleting the datastore file [datastore1] e50e95c0-830b-4d71-999b-546b138bf8f4 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 784.588246] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a15916af-dcf2-478e-96d1-68ce62dd0666 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.596696] env[63538]: DEBUG oslo_vmware.api [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 784.596696] env[63538]: value = "task-5100889" [ 784.596696] env[63538]: _type = "Task" [ 784.596696] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.605437] env[63538]: DEBUG oslo_vmware.api [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100889, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.699719] env[63538]: DEBUG nova.scheduler.client.report [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 785.022032] env[63538]: DEBUG nova.compute.manager [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 785.046177] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100888, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.058185] env[63538]: DEBUG nova.virt.hardware [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:52:22Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='2a0f5711-293c-4327-a7c3-091f85550bf8',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-805814405',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 785.058185] env[63538]: DEBUG nova.virt.hardware [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 785.058185] env[63538]: DEBUG nova.virt.hardware [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 785.058715] env[63538]: DEBUG nova.virt.hardware [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 785.059060] env[63538]: DEBUG nova.virt.hardware [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 785.059355] env[63538]: DEBUG nova.virt.hardware [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 785.059703] env[63538]: DEBUG nova.virt.hardware [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
785.060027] env[63538]: DEBUG nova.virt.hardware [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 785.060331] env[63538]: DEBUG nova.virt.hardware [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 785.060620] env[63538]: DEBUG nova.virt.hardware [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 785.060941] env[63538]: DEBUG nova.virt.hardware [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 785.061976] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d24a315-d2d7-4181-bb57-278c13fec62d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.070982] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450b1704-2d58-461c-ab6b-eae973710a49 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.111759] env[63538]: DEBUG oslo_vmware.api [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5100889, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.45024} completed successfully. 
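The e50e95c0 teardown above follows a fixed order: power the VM off (PowerOffVM_Task), unregister it (UnregisterVM), then delete its datastore directory (DeleteDatastoreFile_Task, task-5100889). The sketch below mirrors that order with print-only stand-ins for the vSphere calls; it is an illustration, not the driver's implementation.

def power_off(instance):
    print(f"[instance: {instance}] Powered off the VM")       # PowerOffVM_Task

def unregister(instance):
    print(f"[instance: {instance}] Unregistered the VM")      # UnregisterVM

def delete_datastore_dir(datastore, instance):
    print(f"Deleting the datastore file [{datastore}] {instance}")  # DeleteDatastoreFile_Task

def destroy_instance(instance, datastore="datastore1"):
    power_off(instance)
    unregister(instance)
    delete_datastore_dir(datastore, instance)

destroy_instance("e50e95c0-830b-4d71-999b-546b138bf8f4")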
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.112327] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 785.112666] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 785.113746] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 785.113746] env[63538]: INFO nova.compute.manager [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Took 1.16 seconds to destroy the instance on the hypervisor. [ 785.113746] env[63538]: DEBUG oslo.service.loopingcall [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 785.114194] env[63538]: DEBUG nova.compute.manager [-] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 785.114397] env[63538]: DEBUG nova.network.neutron [-] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 785.210280] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.213s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.212825] env[63538]: DEBUG oslo_concurrency.lockutils [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.481s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.213290] env[63538]: DEBUG nova.objects.instance [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lazy-loading 'resources' on Instance uuid a7bb1869-5553-40d8-9c0b-366ccdef5fae {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 785.239170] env[63538]: INFO 
nova.scheduler.client.report [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Deleted allocations for instance 080b11d7-a756-45a0-81d5-b5fcc2662ac9 [ 785.289867] env[63538]: DEBUG nova.network.neutron [req-6c0a05a8-b37d-4b30-9616-6c9d53f406dd req-65541cf3-349b-44f7-bc33-b414f13debbc service nova] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Updated VIF entry in instance network info cache for port 07400758-b7cb-4bba-9927-f1576e625a45. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 785.291135] env[63538]: DEBUG nova.network.neutron [req-6c0a05a8-b37d-4b30-9616-6c9d53f406dd req-65541cf3-349b-44f7-bc33-b414f13debbc service nova] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Updating instance_info_cache with network_info: [{"id": "07400758-b7cb-4bba-9927-f1576e625a45", "address": "fa:16:3e:6c:e3:05", "network": {"id": "106e253d-ba43-4eb6-a423-ed6a6d4156aa", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-330603874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d463d24e41b421eb7cb9d51ad207495", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07400758-b7", "ovs_interfaceid": "07400758-b7cb-4bba-9927-f1576e625a45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.543050] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100888, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.635571} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.543496] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] e4b94aa7-7434-4a6e-b6d3-ed02315c435f/e4b94aa7-7434-4a6e-b6d3-ed02315c435f.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 785.543640] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 785.543822] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfe1fe22-fca7-406e-b6d1-3768ea428023 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.556144] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 785.556144] env[63538]: value = "task-5100890" [ 785.556144] env[63538]: _type = "Task" [ 785.556144] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.566353] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100890, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.672260] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Acquiring lock "4e89aa25-fb4a-430d-ab87-feff57b73780" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.673023] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lock "4e89aa25-fb4a-430d-ab87-feff57b73780" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.673023] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Acquiring lock "4e89aa25-fb4a-430d-ab87-feff57b73780-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.673023] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lock "4e89aa25-fb4a-430d-ab87-feff57b73780-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.674608] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lock "4e89aa25-fb4a-430d-ab87-feff57b73780-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.676053] env[63538]: INFO nova.compute.manager [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Terminating instance [ 785.677912] env[63538]: DEBUG nova.compute.manager [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 785.678120] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 785.678952] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526775e3-7553-4518-a6d3-00128e6f7d88 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.688065] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 785.688333] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c43fae5-c519-42f2-abda-b1a429a5291a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.696471] env[63538]: DEBUG oslo_vmware.api [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 785.696471] env[63538]: value = "task-5100891" [ 785.696471] env[63538]: _type = "Task" [ 785.696471] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.706836] env[63538]: DEBUG oslo_vmware.api [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100891, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.751140] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eebf0010-e518-4a39-99a1-b9a8c4c6683a tempest-ServerAddressesTestJSON-1632524142 tempest-ServerAddressesTestJSON-1632524142-project-member] Lock "080b11d7-a756-45a0-81d5-b5fcc2662ac9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 41.980s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.783441] env[63538]: DEBUG nova.compute.manager [req-c7046bb3-1c0c-4592-a1db-e490dd774401 req-697f7440-ea2a-4137-82a0-58af8afb595e service nova] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Received event network-vif-deleted-facecb08-5587-4113-9009-ad339833d9ab {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 785.783696] env[63538]: INFO nova.compute.manager [req-c7046bb3-1c0c-4592-a1db-e490dd774401 req-697f7440-ea2a-4137-82a0-58af8afb595e service nova] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Neutron deleted interface facecb08-5587-4113-9009-ad339833d9ab; detaching it from the instance and deleting it from the info cache [ 785.783997] env[63538]: DEBUG nova.network.neutron [req-c7046bb3-1c0c-4592-a1db-e490dd774401 req-697f7440-ea2a-4137-82a0-58af8afb595e service nova] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.794405] env[63538]: DEBUG oslo_concurrency.lockutils [req-6c0a05a8-b37d-4b30-9616-6c9d53f406dd req-65541cf3-349b-44f7-bc33-b414f13debbc service nova] Releasing lock "refresh_cache-e4b94aa7-7434-4a6e-b6d3-ed02315c435f" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.989394] env[63538]: DEBUG nova.network.neutron [-] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.071134] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100890, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070573} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.071429] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 786.072291] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb4ecd5-a5aa-43bc-9e35-77b9c3dd16a2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.101330] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] e4b94aa7-7434-4a6e-b6d3-ed02315c435f/e4b94aa7-7434-4a6e-b6d3-ed02315c435f.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 786.105356] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dad512e-d62c-4ef1-b2b9-3a51e343d71c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.130085] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 786.130085] env[63538]: value = "task-5100892" [ 786.130085] env[63538]: _type = "Task" [ 786.130085] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.142739] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100892, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.211126] env[63538]: DEBUG oslo_vmware.api [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100891, 'name': PowerOffVM_Task, 'duration_secs': 0.246768} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.211995] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 786.211995] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 786.211995] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2d63d9f-4748-47dc-bbf2-9b1b4f262091 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.275734] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 786.275983] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 786.276233] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Deleting the datastore file [datastore1] 4e89aa25-fb4a-430d-ab87-feff57b73780 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 786.276520] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65ca73dc-2546-412e-90b8-80402fddd827 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.290093] env[63538]: DEBUG oslo_vmware.api [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for the task: (returnval){ [ 786.290093] env[63538]: value = "task-5100894" [ 786.290093] env[63538]: _type = "Task" [ 786.290093] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.290093] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-58b961b8-8a31-4773-a42b-530e5beb3141 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.299386] env[63538]: DEBUG oslo_vmware.api [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100894, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.303613] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f070d06-9dee-46e9-a62b-7cf9d455916c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.317786] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fe10c9-cd29-4359-8d7b-02351adca98c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.325834] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479a9383-8d3b-450a-b2a5-615f88abc6b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.344359] env[63538]: DEBUG nova.compute.manager [req-c7046bb3-1c0c-4592-a1db-e490dd774401 req-697f7440-ea2a-4137-82a0-58af8afb595e service nova] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Detach interface failed, port_id=facecb08-5587-4113-9009-ad339833d9ab, reason: Instance e50e95c0-830b-4d71-999b-546b138bf8f4 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 786.373755] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c8e9f0-37e6-46ee-8b9b-5be0006c97dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.382974] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8de1c70-8853-4571-a328-b1d572c97dc3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.399026] env[63538]: DEBUG nova.compute.provider_tree [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.494040] env[63538]: INFO nova.compute.manager [-] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Took 1.38 seconds to deallocate network for instance. [ 786.643172] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100892, 'name': ReconfigVM_Task, 'duration_secs': 0.308474} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.643172] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Reconfigured VM instance instance-00000032 to attach disk [datastore1] e4b94aa7-7434-4a6e-b6d3-ed02315c435f/e4b94aa7-7434-4a6e-b6d3-ed02315c435f.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 786.643172] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6287cd9a-a84f-4600-9451-3293f7d2928f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.650507] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 786.650507] env[63538]: value = "task-5100895" [ 786.650507] env[63538]: _type = "Task" [ 786.650507] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.666203] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100895, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.754217] env[63538]: DEBUG nova.compute.manager [req-a0f34ec8-ba5e-41b0-b054-1645895547f3 req-f10093ff-a283-4c05-a2d9-17737dd23393 service nova] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Received event network-vif-plugged-11d7dbc5-d269-456b-9a7a-601759e64b51 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 786.754667] env[63538]: DEBUG oslo_concurrency.lockutils [req-a0f34ec8-ba5e-41b0-b054-1645895547f3 req-f10093ff-a283-4c05-a2d9-17737dd23393 service nova] Acquiring lock "a2e036ae-318b-44ea-9db0-10fa3838728b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.755119] env[63538]: DEBUG oslo_concurrency.lockutils [req-a0f34ec8-ba5e-41b0-b054-1645895547f3 req-f10093ff-a283-4c05-a2d9-17737dd23393 service nova] Lock "a2e036ae-318b-44ea-9db0-10fa3838728b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.755488] env[63538]: DEBUG oslo_concurrency.lockutils [req-a0f34ec8-ba5e-41b0-b054-1645895547f3 req-f10093ff-a283-4c05-a2d9-17737dd23393 service nova] Lock "a2e036ae-318b-44ea-9db0-10fa3838728b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.755802] env[63538]: DEBUG nova.compute.manager [req-a0f34ec8-ba5e-41b0-b054-1645895547f3 req-f10093ff-a283-4c05-a2d9-17737dd23393 service nova] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] No waiting events found dispatching 
network-vif-plugged-11d7dbc5-d269-456b-9a7a-601759e64b51 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 786.756125] env[63538]: WARNING nova.compute.manager [req-a0f34ec8-ba5e-41b0-b054-1645895547f3 req-f10093ff-a283-4c05-a2d9-17737dd23393 service nova] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Received unexpected event network-vif-plugged-11d7dbc5-d269-456b-9a7a-601759e64b51 for instance with vm_state building and task_state spawning. [ 786.801059] env[63538]: DEBUG oslo_vmware.api [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Task: {'id': task-5100894, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169472} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.801395] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 786.801603] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 786.801818] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 786.801996] env[63538]: INFO nova.compute.manager [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Took 1.12 seconds to destroy the instance on the hypervisor. [ 786.802267] env[63538]: DEBUG oslo.service.loopingcall [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 786.802617] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 786.802869] env[63538]: DEBUG nova.compute.manager [-] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 786.803209] env[63538]: DEBUG nova.network.neutron [-] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 786.805054] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 786.902602] env[63538]: DEBUG nova.scheduler.client.report [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 786.970239] env[63538]: DEBUG nova.network.neutron [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Successfully updated port: 11d7dbc5-d269-456b-9a7a-601759e64b51 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 787.000555] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.170834] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Acquiring lock "736b110e-7265-42cc-9c9b-35f57c466b0c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.171156] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Lock "736b110e-7265-42cc-9c9b-35f57c466b0c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.171367] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Acquiring lock "736b110e-7265-42cc-9c9b-35f57c466b0c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.171585] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Lock "736b110e-7265-42cc-9c9b-35f57c466b0c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.171827] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Lock "736b110e-7265-42cc-9c9b-35f57c466b0c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.173910] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100895, 'name': Rename_Task, 'duration_secs': 0.158476} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.178206] env[63538]: INFO nova.compute.manager [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Terminating instance [ 787.180588] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 787.180782] env[63538]: DEBUG nova.compute.manager [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 787.180903] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 787.181154] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc4523f7-ea42-4e77-82e4-7ebafa843beb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.184066] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9de0e4-6cd0-4545-8a23-789ae30af690 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.192290] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 787.194479] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57d14013-0338-4d76-a6c2-feb572aa41b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.196291] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 787.196291] env[63538]: value = "task-5100896" [ 787.196291] env[63538]: _type = "Task" [ 787.196291] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.205843] env[63538]: DEBUG oslo_vmware.api [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Waiting for the task: (returnval){ [ 787.205843] env[63538]: value = "task-5100897" [ 787.205843] env[63538]: _type = "Task" [ 787.205843] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.214039] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100896, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.220097] env[63538]: DEBUG oslo_vmware.api [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100897, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.325864] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.325864] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Starting heal instance info cache {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 787.408475] env[63538]: DEBUG oslo_concurrency.lockutils [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.195s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.411769] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.583s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.413416] env[63538]: INFO nova.compute.claims [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 787.445953] env[63538]: INFO nova.scheduler.client.report [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Deleted allocations for instance a7bb1869-5553-40d8-9c0b-366ccdef5fae [ 787.476403] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.476403] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.476403] env[63538]: DEBUG nova.network.neutron [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 787.663889] env[63538]: DEBUG nova.network.neutron [-] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.711036] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f 
tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100896, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.724409] env[63538]: DEBUG oslo_vmware.api [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100897, 'name': PowerOffVM_Task, 'duration_secs': 0.47965} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.724409] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 787.724409] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 787.724409] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09121a5e-b53a-45b0-9c13-d9f880e831b3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.792574] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 787.792954] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 787.794289] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Deleting the datastore file [datastore1] 736b110e-7265-42cc-9c9b-35f57c466b0c {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 787.794289] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7185fe25-d885-4588-aa43-9e04fc3e60f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.802949] env[63538]: DEBUG oslo_vmware.api [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Waiting for the task: (returnval){ [ 787.802949] env[63538]: value = "task-5100899" [ 787.802949] env[63538]: _type = "Task" [ 787.802949] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.820387] env[63538]: DEBUG oslo_vmware.api [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100899, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.864156] env[63538]: DEBUG nova.compute.manager [req-20ed2ebb-8767-44f1-83a3-043896c8319a req-e371a1aa-befe-4e46-8f65-a9a87b0d4d52 service nova] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Received event network-vif-deleted-61a2d4d3-83f2-4f4e-8217-ad2abe2975b9 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 787.898178] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.898178] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquired lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.898178] env[63538]: DEBUG nova.network.neutron [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Forcefully refreshing network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 787.956337] env[63538]: DEBUG oslo_concurrency.lockutils [None req-041e8cc6-154c-481d-9705-8e473ac8e0bf tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "a7bb1869-5553-40d8-9c0b-366ccdef5fae" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 42.447s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.050634] env[63538]: DEBUG nova.network.neutron [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 788.167184] env[63538]: INFO nova.compute.manager [-] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Took 1.36 seconds to deallocate network for instance. [ 788.215601] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100896, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.252618] env[63538]: DEBUG nova.network.neutron [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance_info_cache with network_info: [{"id": "11d7dbc5-d269-456b-9a7a-601759e64b51", "address": "fa:16:3e:45:35:dd", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.81", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11d7dbc5-d2", "ovs_interfaceid": "11d7dbc5-d269-456b-9a7a-601759e64b51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.314869] env[63538]: DEBUG oslo_vmware.api [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Task: {'id': task-5100899, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167418} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.315158] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 788.315368] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 788.315555] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 788.315744] env[63538]: INFO nova.compute.manager [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 788.316015] env[63538]: DEBUG oslo.service.loopingcall [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 788.316250] env[63538]: DEBUG nova.compute.manager [-] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 788.316333] env[63538]: DEBUG nova.network.neutron [-] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 788.678355] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.711982] env[63538]: DEBUG oslo_vmware.api [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100896, 'name': PowerOnVM_Task, 'duration_secs': 1.041764} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.712961] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 788.713243] env[63538]: INFO nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Took 8.59 seconds to spawn the instance on the hypervisor. 
[ 788.717208] env[63538]: DEBUG nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 788.717208] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d525d1-018c-451c-9ff0-ec9c835f5e41 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.763096] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.763576] env[63538]: DEBUG nova.compute.manager [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Instance network_info: |[{"id": "11d7dbc5-d269-456b-9a7a-601759e64b51", "address": "fa:16:3e:45:35:dd", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.81", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11d7dbc5-d2", "ovs_interfaceid": "11d7dbc5-d269-456b-9a7a-601759e64b51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 788.764857] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:35:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '11d7dbc5-d269-456b-9a7a-601759e64b51', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 788.779564] env[63538]: DEBUG oslo.service.loopingcall [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 788.780062] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 788.780286] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c65b38e-e341-4070-bb64-f697d0e38f04 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.818353] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 788.818353] env[63538]: value = "task-5100900" [ 788.818353] env[63538]: _type = "Task" [ 788.818353] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.827292] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100900, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.013335] env[63538]: DEBUG nova.compute.manager [req-33b687eb-b92b-4ece-b5c7-527c8de51b0d req-13d4f91a-0a30-4699-9cfd-5f2cde91135b service nova] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Received event network-changed-11d7dbc5-d269-456b-9a7a-601759e64b51 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 789.013541] env[63538]: DEBUG nova.compute.manager [req-33b687eb-b92b-4ece-b5c7-527c8de51b0d req-13d4f91a-0a30-4699-9cfd-5f2cde91135b service nova] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Refreshing instance network info cache due to event network-changed-11d7dbc5-d269-456b-9a7a-601759e64b51. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 789.013776] env[63538]: DEBUG oslo_concurrency.lockutils [req-33b687eb-b92b-4ece-b5c7-527c8de51b0d req-13d4f91a-0a30-4699-9cfd-5f2cde91135b service nova] Acquiring lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.014472] env[63538]: DEBUG oslo_concurrency.lockutils [req-33b687eb-b92b-4ece-b5c7-527c8de51b0d req-13d4f91a-0a30-4699-9cfd-5f2cde91135b service nova] Acquired lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.014472] env[63538]: DEBUG nova.network.neutron [req-33b687eb-b92b-4ece-b5c7-527c8de51b0d req-13d4f91a-0a30-4699-9cfd-5f2cde91135b service nova] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Refreshing network info cache for port 11d7dbc5-d269-456b-9a7a-601759e64b51 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 789.037613] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641a00d1-8f0b-4b8c-8868-2853da075bb8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.047369] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd18f59-de4b-4489-84d2-78cac96ffac7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.089437] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4ff88af3-9340-4ac3-b31f-ee1e96ceadbb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.099354] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e90c964-b78f-4dd9-a385-a49befb38307 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.120026] env[63538]: DEBUG nova.compute.provider_tree [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.241542] env[63538]: INFO nova.compute.manager [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Took 46.91 seconds to build instance. [ 789.338129] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100900, 'name': CreateVM_Task, 'duration_secs': 0.384205} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.338508] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 789.339498] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.339704] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.340175] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 789.341181] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-014c492c-3064-4ff9-9cea-df91354f9470 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.343684] env[63538]: DEBUG nova.network.neutron [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance_info_cache with network_info: [{"id": "47d19b83-6292-46e2-835f-1198ef52374c", "address": "fa:16:3e:af:6f:01", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d19b83-62", "ovs_interfaceid": "47d19b83-6292-46e2-835f-1198ef52374c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.348907] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 789.348907] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527418b6-abed-8383-7914-8661b8879b68" [ 789.348907] env[63538]: _type = "Task" [ 789.348907] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.359284] env[63538]: DEBUG oslo_concurrency.lockutils [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "b5593b74-fe89-43f5-a8c6-e73159b4efac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.359532] env[63538]: DEBUG oslo_concurrency.lockutils [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "b5593b74-fe89-43f5-a8c6-e73159b4efac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.359741] env[63538]: DEBUG oslo_concurrency.lockutils [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "b5593b74-fe89-43f5-a8c6-e73159b4efac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.359922] env[63538]: DEBUG oslo_concurrency.lockutils [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "b5593b74-fe89-43f5-a8c6-e73159b4efac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.360159] env[63538]: DEBUG oslo_concurrency.lockutils [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock 
"b5593b74-fe89-43f5-a8c6-e73159b4efac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.361863] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527418b6-abed-8383-7914-8661b8879b68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.362381] env[63538]: INFO nova.compute.manager [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Terminating instance [ 789.364624] env[63538]: DEBUG nova.compute.manager [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 789.364832] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 789.365666] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552597ce-c73f-4b68-87e3-5102bd8d4bb5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.374795] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 789.375105] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1db86a06-3fd1-4f65-8e0f-255bb26a30eb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.383217] env[63538]: DEBUG oslo_vmware.api [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 789.383217] env[63538]: value = "task-5100901" [ 789.383217] env[63538]: _type = "Task" [ 789.383217] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.393372] env[63538]: DEBUG oslo_vmware.api [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100901, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.623788] env[63538]: DEBUG nova.scheduler.client.report [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 789.671220] env[63538]: DEBUG nova.network.neutron [-] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.745983] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7950d22b-47fb-40e3-9396-f3666ea95c6f tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "e4b94aa7-7434-4a6e-b6d3-ed02315c435f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.707s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.799246] env[63538]: DEBUG nova.network.neutron [req-33b687eb-b92b-4ece-b5c7-527c8de51b0d req-13d4f91a-0a30-4699-9cfd-5f2cde91135b service nova] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updated VIF entry in instance network info cache for port 11d7dbc5-d269-456b-9a7a-601759e64b51. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 789.799643] env[63538]: DEBUG nova.network.neutron [req-33b687eb-b92b-4ece-b5c7-527c8de51b0d req-13d4f91a-0a30-4699-9cfd-5f2cde91135b service nova] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance_info_cache with network_info: [{"id": "11d7dbc5-d269-456b-9a7a-601759e64b51", "address": "fa:16:3e:45:35:dd", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.81", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11d7dbc5-d2", "ovs_interfaceid": "11d7dbc5-d269-456b-9a7a-601759e64b51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.847582] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Releasing lock "refresh_cache-2e1b0bc7-3909-48e2-b9be-26822a57ee67" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.847801] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updated the network info_cache for instance {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10088}} [ 789.848023] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 789.848191] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 789.848356] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 789.848513] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 789.848656] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63538) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 789.848815] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 789.848952] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63538) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10636}} [ 789.849146] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 789.861312] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527418b6-abed-8383-7914-8661b8879b68, 'name': SearchDatastore_Task, 'duration_secs': 0.029198} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.861669] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.862015] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 789.862340] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.862537] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.862784] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 789.863482] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0323b5a7-fc74-4c54-9e24-386a378178f9 
{{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.876317] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 789.876724] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 789.877521] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-547a142b-46d0-4836-8031-28b275b6cbb0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.888407] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 789.888407] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f998da-74a8-7fdd-d7ed-82ea09fac665" [ 789.888407] env[63538]: _type = "Task" [ 789.888407] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.894351] env[63538]: DEBUG nova.compute.manager [req-5925f169-fba3-41f5-8299-02b60a331238 req-00f98441-493c-4274-ac45-49c0e35b3e19 service nova] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Received event network-vif-deleted-27cdea75-ceda-4390-8313-cfbd1681ebd2 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 789.898606] env[63538]: DEBUG oslo_vmware.api [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100901, 'name': PowerOffVM_Task, 'duration_secs': 0.222934} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.899291] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 789.899503] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 789.899776] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf3200d1-e826-4bd7-9267-e7c5bf65fabb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.905306] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f998da-74a8-7fdd-d7ed-82ea09fac665, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.968181] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 789.968181] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 789.968181] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Deleting the datastore file [datastore2] b5593b74-fe89-43f5-a8c6-e73159b4efac {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 789.968181] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b88cd3a4-22f2-4098-88d2-53f7c8b65b4c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.978031] env[63538]: DEBUG oslo_vmware.api [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for the task: (returnval){ [ 789.978031] env[63538]: value = "task-5100903" [ 789.978031] env[63538]: _type = "Task" [ 789.978031] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.986314] env[63538]: DEBUG oslo_vmware.api [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100903, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.129052] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.717s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.129613] env[63538]: DEBUG nova.compute.manager [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 790.133565] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.383s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.135269] env[63538]: INFO nova.compute.claims [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 790.147769] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "e4b94aa7-7434-4a6e-b6d3-ed02315c435f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.148069] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "e4b94aa7-7434-4a6e-b6d3-ed02315c435f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.148232] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "e4b94aa7-7434-4a6e-b6d3-ed02315c435f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.148766] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 
tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "e4b94aa7-7434-4a6e-b6d3-ed02315c435f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.148766] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "e4b94aa7-7434-4a6e-b6d3-ed02315c435f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.151334] env[63538]: INFO nova.compute.manager [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Terminating instance [ 790.153440] env[63538]: DEBUG nova.compute.manager [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 790.153440] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 790.154292] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77b55e6-e564-48d5-b784-d8a103f0b5c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.162430] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 790.162737] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c535a2cd-79ce-4c18-b8f2-12eca4024c7f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.169973] env[63538]: DEBUG oslo_vmware.api [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 790.169973] env[63538]: value = "task-5100904" [ 790.169973] env[63538]: _type = "Task" [ 790.169973] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.174951] env[63538]: INFO nova.compute.manager [-] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Took 1.86 seconds to deallocate network for instance. 
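[Annotation, not part of the captured log] The 'Acquiring lock "..." by "..."', 'Lock "..." acquired ... :: waited 0.000s', and 'Lock "..." "released" ... :: held 0.000s' entries above are emitted by oslo.concurrency's lockutils. The sketch below is a minimal, hypothetical use of the same primitives; the lock names simply reuse identifiers visible in the log for illustration.

    # Illustrative sketch only -- not log content.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Only one thread/greenthread per process runs this body at a time;
        # entry and exit produce the acquire/release DEBUG lines seen above.
        pass

    def terminate_instance(instance_uuid):
        # Per-instance locks such as "<uuid>" and "<uuid>-events" can also
        # be taken with the context-manager form.
        with lockutils.lock(instance_uuid):
            with lockutils.lock(instance_uuid + '-events'):
                pass

    claim_resources()
    terminate_instance('e4b94aa7-7434-4a6e-b6d3-ed02315c435f')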
[ 790.180684] env[63538]: DEBUG oslo_vmware.api [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100904, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.249035] env[63538]: DEBUG nova.compute.manager [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 790.267812] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "46e2c1f4-edf7-45d6-ba77-c872005fcf1b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.268137] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "46e2c1f4-edf7-45d6-ba77-c872005fcf1b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.268387] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "46e2c1f4-edf7-45d6-ba77-c872005fcf1b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.268598] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "46e2c1f4-edf7-45d6-ba77-c872005fcf1b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.268770] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "46e2c1f4-edf7-45d6-ba77-c872005fcf1b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.271560] env[63538]: INFO nova.compute.manager [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Terminating instance [ 790.274894] env[63538]: DEBUG nova.compute.manager [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] 
Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 790.275173] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 790.276071] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23fcbae-80c2-461a-b808-750b1284b616 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.286948] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 790.287282] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-998dad3b-8dc8-4d82-8e9d-5562239c97f2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.296712] env[63538]: DEBUG oslo_vmware.api [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 790.296712] env[63538]: value = "task-5100905" [ 790.296712] env[63538]: _type = "Task" [ 790.296712] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.302672] env[63538]: DEBUG oslo_concurrency.lockutils [req-33b687eb-b92b-4ece-b5c7-527c8de51b0d req-13d4f91a-0a30-4699-9cfd-5f2cde91135b service nova] Releasing lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.306950] env[63538]: DEBUG oslo_vmware.api [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100905, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.355721] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.400196] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f998da-74a8-7fdd-d7ed-82ea09fac665, 'name': SearchDatastore_Task, 'duration_secs': 0.019639} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.401312] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2610c82-7d5c-4fbb-87d9-2f8287b3f14b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.407239] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 790.407239] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cfd4e0-1323-1a9c-59ae-cdc3ca379546" [ 790.407239] env[63538]: _type = "Task" [ 790.407239] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.417605] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cfd4e0-1323-1a9c-59ae-cdc3ca379546, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.486652] env[63538]: DEBUG oslo_vmware.api [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Task: {'id': task-5100903, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.373431} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.486969] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 790.487127] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 790.487310] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 790.487708] env[63538]: INFO nova.compute.manager [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Took 1.12 seconds to destroy the instance on the hypervisor. [ 790.487803] env[63538]: DEBUG oslo.service.loopingcall [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 790.487930] env[63538]: DEBUG nova.compute.manager [-] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 790.488035] env[63538]: DEBUG nova.network.neutron [-] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 790.635303] env[63538]: DEBUG nova.compute.utils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 790.637358] env[63538]: DEBUG nova.compute.manager [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 790.637505] env[63538]: DEBUG nova.network.neutron [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 790.683942] env[63538]: DEBUG oslo_vmware.api [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100904, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.685352] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.686873] env[63538]: DEBUG nova.policy [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '034ec4f48d8e40c9b99e83b08b9c0c5a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9427981aac124f6aa0c4d8d45b0ae917', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 790.773304] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.810137] env[63538]: DEBUG oslo_vmware.api [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100905, 'name': PowerOffVM_Task, 'duration_secs': 0.470524} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.811859] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 790.812388] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 790.812929] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f330aba-1760-461c-9429-4263a0056645 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.882568] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 790.882807] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 790.883017] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Deleting the datastore file [datastore2] 46e2c1f4-edf7-45d6-ba77-c872005fcf1b {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 790.883321] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6871dc3d-c1ea-476a-8374-17a931ec6fba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.891343] env[63538]: DEBUG oslo_vmware.api [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 790.891343] env[63538]: value = "task-5100907" [ 790.891343] env[63538]: _type = "Task" [ 790.891343] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.900741] env[63538]: DEBUG oslo_vmware.api [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100907, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.923507] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cfd4e0-1323-1a9c-59ae-cdc3ca379546, 'name': SearchDatastore_Task, 'duration_secs': 0.014412} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.923955] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.924289] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] a2e036ae-318b-44ea-9db0-10fa3838728b/a2e036ae-318b-44ea-9db0-10fa3838728b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 790.924576] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-727d2190-ecfb-4e80-8ea8-ac003330ae0f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.934208] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 790.934208] env[63538]: value = "task-5100908" [ 790.934208] env[63538]: _type = "Task" [ 790.934208] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.945166] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100908, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.117195] env[63538]: DEBUG nova.compute.manager [req-597e4000-7cda-4288-9406-17dccb2b2ce6 req-8a97a52c-7bf4-4e70-a4e3-a6d06f7e9e10 service nova] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Received event network-vif-deleted-dc774ce7-f5aa-452a-828d-e56e0339fe56 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 791.118013] env[63538]: INFO nova.compute.manager [req-597e4000-7cda-4288-9406-17dccb2b2ce6 req-8a97a52c-7bf4-4e70-a4e3-a6d06f7e9e10 service nova] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Neutron deleted interface dc774ce7-f5aa-452a-828d-e56e0339fe56; detaching it from the instance and deleting it from the info cache [ 791.118013] env[63538]: DEBUG nova.network.neutron [req-597e4000-7cda-4288-9406-17dccb2b2ce6 req-8a97a52c-7bf4-4e70-a4e3-a6d06f7e9e10 service nova] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.142856] env[63538]: DEBUG nova.compute.manager [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 791.188534] env[63538]: DEBUG oslo_vmware.api [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100904, 'name': PowerOffVM_Task, 'duration_secs': 0.588148} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.188534] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 791.188808] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 791.189514] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e04b1a9-73d6-4b98-83d8-4d2ce88db406 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.314046] env[63538]: DEBUG nova.network.neutron [-] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.361627] env[63538]: DEBUG nova.network.neutron [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Successfully created port: 0d48de93-8e4f-4795-a582-f00e76e60047 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 791.404840] env[63538]: DEBUG oslo_vmware.api [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100907, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18587} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.407286] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 791.407532] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 791.407689] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 791.407945] env[63538]: INFO nova.compute.manager [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Took 1.13 seconds to destroy the instance on the hypervisor. 
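[Annotation, not part of the captured log] The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return." entries (one follows immediately below) come from oslo.service's looping-call helper, which repeatedly invokes a function until it raises LoopingCallDone. The sketch below is a minimal, hypothetical retry loop built on that API, with a placeholder in place of the real network-deallocation step.

    # Illustrative sketch only -- not log content.
    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _deallocate_with_retries():
        attempts['count'] += 1
        # Placeholder for the real deallocation attempt.
        if attempts['count'] < 3:
            return  # not done yet; run again on the next interval
        # Stop the loop and hand a result back to wait().
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=0.5).wait()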
[ 791.408267] env[63538]: DEBUG oslo.service.loopingcall [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 791.408805] env[63538]: DEBUG nova.compute.manager [-] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 791.408913] env[63538]: DEBUG nova.network.neutron [-] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 791.447523] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100908, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.625878] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2287233-675e-4d57-b918-427ec5cc9fac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.636155] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f3519b-3541-4a0b-8422-c54d3db31eec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.673920] env[63538]: DEBUG nova.compute.manager [req-597e4000-7cda-4288-9406-17dccb2b2ce6 req-8a97a52c-7bf4-4e70-a4e3-a6d06f7e9e10 service nova] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Detach interface failed, port_id=dc774ce7-f5aa-452a-828d-e56e0339fe56, reason: Instance b5593b74-fe89-43f5-a8c6-e73159b4efac could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 791.738390] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a6f080-a59c-40e3-8a37-00bb315e18bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.748472] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0eb264-b3be-461f-a331-12ba153a65e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.785340] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09c2b08-9df2-4317-a858-c9747cd3077b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.795310] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3a6ac5-faa6-4ae0-95b2-b60a079afa08 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.815846] env[63538]: DEBUG nova.compute.provider_tree [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.820824] env[63538]: INFO nova.compute.manager [-] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Took 1.33 seconds to deallocate network for instance. [ 791.945962] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100908, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542759} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.947353] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] a2e036ae-318b-44ea-9db0-10fa3838728b/a2e036ae-318b-44ea-9db0-10fa3838728b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 791.947353] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 791.947353] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4931d981-8aab-4085-b275-08104f59df0b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.954948] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 791.954948] env[63538]: value = "task-5100910" [ 791.954948] env[63538]: _type = "Task" [ 791.954948] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.964222] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100910, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.159979] env[63538]: DEBUG nova.compute.manager [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 792.186703] env[63538]: DEBUG nova.virt.hardware [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 792.186981] env[63538]: DEBUG nova.virt.hardware [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 792.187219] env[63538]: DEBUG nova.virt.hardware [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.187440] env[63538]: DEBUG nova.virt.hardware [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 792.187600] env[63538]: DEBUG nova.virt.hardware [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.187771] env[63538]: DEBUG nova.virt.hardware [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 792.188053] env[63538]: DEBUG nova.virt.hardware [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 792.188267] env[63538]: DEBUG nova.virt.hardware [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 792.188452] env[63538]: DEBUG nova.virt.hardware [None 
req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 792.188639] env[63538]: DEBUG nova.virt.hardware [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 792.188832] env[63538]: DEBUG nova.virt.hardware [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 792.189762] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ac219d-3a7a-4305-be8d-217be2987b59 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.199226] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c7c976-6665-4d0d-a0fc-922051ee02fb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.278705] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 792.278955] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 792.279169] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Deleting the datastore file [datastore1] e4b94aa7-7434-4a6e-b6d3-ed02315c435f {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 792.280095] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc7b5226-3614-4c4e-a6f4-a7bc6962d3b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.287110] env[63538]: DEBUG oslo_vmware.api [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 792.287110] env[63538]: value = "task-5100911" [ 792.287110] env[63538]: _type = "Task" [ 792.287110] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.290661] env[63538]: DEBUG nova.network.neutron [-] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.298169] env[63538]: DEBUG oslo_vmware.api [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100911, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.320306] env[63538]: DEBUG nova.scheduler.client.report [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 792.330210] env[63538]: DEBUG oslo_concurrency.lockutils [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.467493] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100910, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076225} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.467493] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 792.468163] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d27beec-ec0e-460f-8506-db182ee7306c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.492781] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] a2e036ae-318b-44ea-9db0-10fa3838728b/a2e036ae-318b-44ea-9db0-10fa3838728b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 792.493134] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27b767d8-1bf3-420f-9dc6-6181dacfcea1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.514276] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 792.514276] env[63538]: value = "task-5100912" [ 792.514276] env[63538]: _type = "Task" [ 792.514276] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.527459] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100912, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.798293] env[63538]: INFO nova.compute.manager [-] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Took 1.39 seconds to deallocate network for instance. [ 792.798733] env[63538]: DEBUG oslo_vmware.api [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5100911, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204232} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.800502] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 792.800709] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 792.801176] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 792.801579] env[63538]: INFO nova.compute.manager [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Took 2.65 seconds to destroy the instance on the hypervisor. [ 792.801943] env[63538]: DEBUG oslo.service.loopingcall [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 792.805308] env[63538]: DEBUG nova.compute.manager [-] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 792.805415] env[63538]: DEBUG nova.network.neutron [-] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 792.826452] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.829449] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.474s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.831049] env[63538]: INFO nova.compute.claims [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 792.925172] env[63538]: DEBUG nova.compute.manager [req-0909b2dc-ae69-43b3-b98d-ee99e0adb07e req-264e5fab-194d-48d1-a010-da651cae70c8 service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Received event network-vif-plugged-0d48de93-8e4f-4795-a582-f00e76e60047 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 792.925172] env[63538]: DEBUG oslo_concurrency.lockutils [req-0909b2dc-ae69-43b3-b98d-ee99e0adb07e req-264e5fab-194d-48d1-a010-da651cae70c8 service nova] Acquiring lock "d5d557c6-3d4e-4122-8756-218c9757fa01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.925172] env[63538]: DEBUG oslo_concurrency.lockutils [req-0909b2dc-ae69-43b3-b98d-ee99e0adb07e req-264e5fab-194d-48d1-a010-da651cae70c8 service nova] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.925172] env[63538]: DEBUG oslo_concurrency.lockutils [req-0909b2dc-ae69-43b3-b98d-ee99e0adb07e req-264e5fab-194d-48d1-a010-da651cae70c8 service nova] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.925909] env[63538]: DEBUG nova.compute.manager [req-0909b2dc-ae69-43b3-b98d-ee99e0adb07e req-264e5fab-194d-48d1-a010-da651cae70c8 service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] No waiting events found dispatching 
network-vif-plugged-0d48de93-8e4f-4795-a582-f00e76e60047 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 792.926290] env[63538]: WARNING nova.compute.manager [req-0909b2dc-ae69-43b3-b98d-ee99e0adb07e req-264e5fab-194d-48d1-a010-da651cae70c8 service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Received unexpected event network-vif-plugged-0d48de93-8e4f-4795-a582-f00e76e60047 for instance with vm_state building and task_state spawning. [ 793.029025] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100912, 'name': ReconfigVM_Task, 'duration_secs': 0.325278} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.029025] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Reconfigured VM instance instance-00000033 to attach disk [datastore1] a2e036ae-318b-44ea-9db0-10fa3838728b/a2e036ae-318b-44ea-9db0-10fa3838728b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 793.029025] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9974c70-333e-4599-a653-9c79201a9eb5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.038380] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 793.038380] env[63538]: value = "task-5100913" [ 793.038380] env[63538]: _type = "Task" [ 793.038380] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.047523] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100913, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.146484] env[63538]: DEBUG nova.compute.manager [req-d073c62a-2112-45dd-b704-c4f063722d66 req-39f8ff21-ca98-4929-9b57-7a80af02b94e service nova] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Received event network-vif-deleted-cb36613d-3fcd-42c3-9f60-e642855df901 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 793.146697] env[63538]: DEBUG nova.compute.manager [req-d073c62a-2112-45dd-b704-c4f063722d66 req-39f8ff21-ca98-4929-9b57-7a80af02b94e service nova] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Received event network-vif-deleted-07400758-b7cb-4bba-9927-f1576e625a45 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 793.146913] env[63538]: INFO nova.compute.manager [req-d073c62a-2112-45dd-b704-c4f063722d66 req-39f8ff21-ca98-4929-9b57-7a80af02b94e service nova] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Neutron deleted interface 07400758-b7cb-4bba-9927-f1576e625a45; detaching it from the instance and deleting it from the info cache [ 793.147115] env[63538]: DEBUG nova.network.neutron [req-d073c62a-2112-45dd-b704-c4f063722d66 req-39f8ff21-ca98-4929-9b57-7a80af02b94e service nova] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.307101] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.331302] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Acquiring lock "a02a4858-7b36-42a2-aff9-75b453521886" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.331302] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Lock "a02a4858-7b36-42a2-aff9-75b453521886" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.480615] env[63538]: DEBUG nova.network.neutron [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Successfully updated port: 0d48de93-8e4f-4795-a582-f00e76e60047 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 793.502118] env[63538]: DEBUG nova.compute.manager [req-898305b4-1334-4739-88ad-5f9ae2f69925 req-e727e3e0-4668-4f53-86c1-3dee0515f10f service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Received event network-changed-0d48de93-8e4f-4795-a582-f00e76e60047 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 
793.502726] env[63538]: DEBUG nova.compute.manager [req-898305b4-1334-4739-88ad-5f9ae2f69925 req-e727e3e0-4668-4f53-86c1-3dee0515f10f service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Refreshing instance network info cache due to event network-changed-0d48de93-8e4f-4795-a582-f00e76e60047. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 793.502726] env[63538]: DEBUG oslo_concurrency.lockutils [req-898305b4-1334-4739-88ad-5f9ae2f69925 req-e727e3e0-4668-4f53-86c1-3dee0515f10f service nova] Acquiring lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.502726] env[63538]: DEBUG oslo_concurrency.lockutils [req-898305b4-1334-4739-88ad-5f9ae2f69925 req-e727e3e0-4668-4f53-86c1-3dee0515f10f service nova] Acquired lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.502902] env[63538]: DEBUG nova.network.neutron [req-898305b4-1334-4739-88ad-5f9ae2f69925 req-e727e3e0-4668-4f53-86c1-3dee0515f10f service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Refreshing network info cache for port 0d48de93-8e4f-4795-a582-f00e76e60047 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 793.547309] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100913, 'name': Rename_Task, 'duration_secs': 0.142839} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.547606] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 793.547859] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd088620-0584-4cfe-8434-eda027325757 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.554984] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 793.554984] env[63538]: value = "task-5100914" [ 793.554984] env[63538]: _type = "Task" [ 793.554984] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.562743] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100914, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.616460] env[63538]: DEBUG nova.network.neutron [-] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.650297] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2645fdd1-21b9-4bff-96fe-d77d935e9359 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.660506] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b75e1e6-93e4-4af7-b1fd-2066a60b4a5a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.692262] env[63538]: DEBUG nova.compute.manager [req-d073c62a-2112-45dd-b704-c4f063722d66 req-39f8ff21-ca98-4929-9b57-7a80af02b94e service nova] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Detach interface failed, port_id=07400758-b7cb-4bba-9927-f1576e625a45, reason: Instance e4b94aa7-7434-4a6e-b6d3-ed02315c435f could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 793.834741] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Lock "a02a4858-7b36-42a2-aff9-75b453521886" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.504s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.835428] env[63538]: DEBUG nova.compute.manager [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 793.982801] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.067833] env[63538]: DEBUG nova.network.neutron [req-898305b4-1334-4739-88ad-5f9ae2f69925 req-e727e3e0-4668-4f53-86c1-3dee0515f10f service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 794.075238] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100914, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.119591] env[63538]: INFO nova.compute.manager [-] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Took 1.31 seconds to deallocate network for instance. 
[ 794.201857] env[63538]: DEBUG nova.network.neutron [req-898305b4-1334-4739-88ad-5f9ae2f69925 req-e727e3e0-4668-4f53-86c1-3dee0515f10f service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.320663] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb29820f-8c1d-4b76-abe1-90e6cabcdd4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.330257] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8081d1c0-6553-4ff1-abed-d65b9082de36 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.363893] env[63538]: DEBUG nova.compute.utils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 794.365995] env[63538]: DEBUG nova.compute.manager [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 794.366196] env[63538]: DEBUG nova.network.neutron [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 794.368465] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf098cc-3f55-4778-8f99-069e204d0711 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.377211] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272f2060-9054-4018-97ba-e8642910cb9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.392095] env[63538]: DEBUG nova.compute.provider_tree [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.418620] env[63538]: DEBUG nova.policy [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b3e7bfb27104b0698abd781b407d9e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f250802c58b48ce8b9fb27eae120f56', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize 
/opt/stack/nova/nova/policy.py:201}} [ 794.570528] env[63538]: DEBUG oslo_vmware.api [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5100914, 'name': PowerOnVM_Task, 'duration_secs': 0.553769} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.571073] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 794.571469] env[63538]: INFO nova.compute.manager [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Took 9.55 seconds to spawn the instance on the hypervisor. [ 794.575016] env[63538]: DEBUG nova.compute.manager [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 794.575016] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bbf782-5875-4f02-b1b6-94467bbf8b10 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.631750] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.698469] env[63538]: DEBUG nova.network.neutron [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Successfully created port: 14cf1960-9e0d-41c9-b9c1-44ff70d859e3 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 794.710871] env[63538]: DEBUG oslo_concurrency.lockutils [req-898305b4-1334-4739-88ad-5f9ae2f69925 req-e727e3e0-4668-4f53-86c1-3dee0515f10f service nova] Releasing lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.710871] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquired lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.710871] env[63538]: DEBUG nova.network.neutron [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} 
[ 794.869807] env[63538]: DEBUG nova.compute.manager [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 794.897025] env[63538]: DEBUG nova.scheduler.client.report [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 795.095736] env[63538]: INFO nova.compute.manager [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Took 50.09 seconds to build instance. [ 795.255203] env[63538]: DEBUG nova.network.neutron [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 795.401183] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.572s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.401789] env[63538]: DEBUG nova.compute.manager [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 795.404798] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.121s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.406276] env[63538]: INFO nova.compute.claims [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 795.463742] env[63538]: DEBUG nova.network.neutron [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Updating instance_info_cache with network_info: [{"id": "0d48de93-8e4f-4795-a582-f00e76e60047", "address": "fa:16:3e:ca:bf:a1", "network": {"id": "955bc6e2-cea0-4cf9-80fb-521ae0e565f0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-726504029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9427981aac124f6aa0c4d8d45b0ae917", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c297fe21-cd0b-4226-813b-a65d2358d034", "external-id": "nsx-vlan-transportzone-98", "segmentation_id": 98, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d48de93-8e", "ovs_interfaceid": "0d48de93-8e4f-4795-a582-f00e76e60047", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.597359] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a8e9b44-105a-4a48-8f7c-b334fe1432b3 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "a2e036ae-318b-44ea-9db0-10fa3838728b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.279s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.885058] env[63538]: DEBUG nova.compute.manager [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 795.909992] env[63538]: DEBUG nova.virt.hardware [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 795.910318] env[63538]: DEBUG nova.virt.hardware [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 795.910488] env[63538]: DEBUG nova.virt.hardware [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 795.910681] env[63538]: DEBUG nova.virt.hardware [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 795.910870] env[63538]: DEBUG nova.virt.hardware [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 795.911094] env[63538]: DEBUG nova.virt.hardware [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 795.911225] env[63538]: DEBUG nova.virt.hardware [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 795.911446] env[63538]: DEBUG nova.virt.hardware [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 795.911716] env[63538]: DEBUG nova.virt.hardware [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 
tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 795.912117] env[63538]: DEBUG nova.virt.hardware [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 795.912386] env[63538]: DEBUG nova.virt.hardware [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 795.914040] env[63538]: DEBUG nova.compute.utils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 795.918418] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a317df-b4ef-41e5-8a21-a9c32cdc1f8e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.922276] env[63538]: DEBUG nova.compute.manager [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 795.922451] env[63538]: DEBUG nova.network.neutron [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 795.932598] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c7382a-9160-41b6-a8fd-b06e83a295e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.965942] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Releasing lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.967183] env[63538]: DEBUG nova.compute.manager [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Instance network_info: |[{"id": "0d48de93-8e4f-4795-a582-f00e76e60047", "address": "fa:16:3e:ca:bf:a1", "network": {"id": "955bc6e2-cea0-4cf9-80fb-521ae0e565f0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-726504029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9427981aac124f6aa0c4d8d45b0ae917", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c297fe21-cd0b-4226-813b-a65d2358d034", "external-id": "nsx-vlan-transportzone-98", "segmentation_id": 98, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d48de93-8e", "ovs_interfaceid": "0d48de93-8e4f-4795-a582-f00e76e60047", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 795.967183] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:bf:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c297fe21-cd0b-4226-813b-a65d2358d034', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d48de93-8e4f-4795-a582-f00e76e60047', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 795.975228] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Creating folder: Project (9427981aac124f6aa0c4d8d45b0ae917). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 795.975622] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-627b5ad9-2f0f-4982-b1bb-c8c780fc374c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.988240] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Created folder: Project (9427981aac124f6aa0c4d8d45b0ae917) in parent group-v992234. [ 795.988518] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Creating folder: Instances. Parent ref: group-v992376. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 795.988825] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2001053-5c2e-4f77-9483-3973c431acad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.999242] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Created folder: Instances in parent group-v992376. [ 795.999420] env[63538]: DEBUG oslo.service.loopingcall [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 795.999635] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 795.999854] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a346909c-6c98-4aa0-b851-cea27526165d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.016173] env[63538]: DEBUG nova.policy [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5bf72236fc049b7a22b8a2e53e4d7cd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6906dcd3e0074931bdbe4233fbc2bf95', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 796.022745] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 796.022745] env[63538]: value = "task-5100917" [ 796.022745] env[63538]: _type = "Task" [ 796.022745] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.031389] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100917, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.103519] env[63538]: DEBUG nova.compute.manager [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 796.215284] env[63538]: DEBUG nova.compute.manager [req-f45c4200-d2d6-4ec7-b0fd-f9e7cf1fd199 req-5f1daffc-09ec-4c90-b308-42aa70f051c8 service nova] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Received event network-vif-plugged-14cf1960-9e0d-41c9-b9c1-44ff70d859e3 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 796.215284] env[63538]: DEBUG oslo_concurrency.lockutils [req-f45c4200-d2d6-4ec7-b0fd-f9e7cf1fd199 req-5f1daffc-09ec-4c90-b308-42aa70f051c8 service nova] Acquiring lock "f703cd1c-4b77-4a85-a91b-63a2bd0e84a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.215284] env[63538]: DEBUG oslo_concurrency.lockutils [req-f45c4200-d2d6-4ec7-b0fd-f9e7cf1fd199 req-5f1daffc-09ec-4c90-b308-42aa70f051c8 service nova] Lock "f703cd1c-4b77-4a85-a91b-63a2bd0e84a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.218312] env[63538]: DEBUG oslo_concurrency.lockutils [req-f45c4200-d2d6-4ec7-b0fd-f9e7cf1fd199 req-5f1daffc-09ec-4c90-b308-42aa70f051c8 service nova] Lock "f703cd1c-4b77-4a85-a91b-63a2bd0e84a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.218312] env[63538]: DEBUG nova.compute.manager [req-f45c4200-d2d6-4ec7-b0fd-f9e7cf1fd199 req-5f1daffc-09ec-4c90-b308-42aa70f051c8 service nova] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] No waiting events found dispatching network-vif-plugged-14cf1960-9e0d-41c9-b9c1-44ff70d859e3 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 796.218312] env[63538]: WARNING nova.compute.manager [req-f45c4200-d2d6-4ec7-b0fd-f9e7cf1fd199 req-5f1daffc-09ec-4c90-b308-42aa70f051c8 service nova] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Received unexpected event network-vif-plugged-14cf1960-9e0d-41c9-b9c1-44ff70d859e3 for instance with vm_state building and task_state spawning. [ 796.359396] env[63538]: DEBUG nova.network.neutron [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Successfully updated port: 14cf1960-9e0d-41c9-b9c1-44ff70d859e3 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 796.428357] env[63538]: DEBUG nova.compute.manager [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 796.460699] env[63538]: DEBUG nova.network.neutron [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Successfully created port: 377fafa3-5b12-4619-8d84-bf0b09188cd6 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 796.538230] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100917, 'name': CreateVM_Task, 'duration_secs': 0.393806} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.538464] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 796.539844] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.539844] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.539844] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 796.540082] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4846ab7-8175-40d5-b27a-c7ad1825cc61 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.547051] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 796.547051] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521041de-0d39-d662-252e-a4843fb1a744" [ 796.547051] env[63538]: _type = "Task" [ 796.547051] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.555510] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521041de-0d39-d662-252e-a4843fb1a744, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.625297] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.780835] env[63538]: DEBUG nova.compute.manager [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Stashing vm_state: active {{(pid=63538) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 796.818531] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "376ee3d9-e8b5-4f47-9622-b873126b492e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.818998] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "376ee3d9-e8b5-4f47-9622-b873126b492e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.861823] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Acquiring lock "refresh_cache-f703cd1c-4b77-4a85-a91b-63a2bd0e84a9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.861823] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Acquired lock "refresh_cache-f703cd1c-4b77-4a85-a91b-63a2bd0e84a9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.862737] env[63538]: DEBUG nova.network.neutron [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 796.882299] env[63538]: DEBUG nova.network.neutron [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Successfully created port: 831b08d0-41bf-439d-8e03-090fc08a5815 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 797.061203] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': 
session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521041de-0d39-d662-252e-a4843fb1a744, 'name': SearchDatastore_Task, 'duration_secs': 0.01194} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.062807] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.063334] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 797.063790] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.064143] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.064488] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 797.068121] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a014cd7-4476-4417-bbfb-ffce9c01433e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.078301] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 797.078515] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 797.084273] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2c719e2-3950-4ab7-9637-b3c4e077f567 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.089875] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546b8201-e6ae-4719-9b07-588e4472f3ec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.099868] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cd313a-c5bc-4c86-b49f-73da88f033a6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.103728] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 797.103728] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f28320-da9d-e17c-958d-4497b0b96052" [ 797.103728] env[63538]: _type = "Task" [ 797.103728] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.133834] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d983279-8da6-4658-b121-eae3f95e59a0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.141183] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f28320-da9d-e17c-958d-4497b0b96052, 'name': SearchDatastore_Task, 'duration_secs': 0.011498} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.142898] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9b926d2-e4a5-4cca-9fd4-3527118d0703 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.151615] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.151964] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.153260] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48ea36f-d89a-4941-872a-aabbdde16331 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.159767] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 797.159767] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ea4e10-555e-1644-c6c5-1f2469d03af3" [ 797.159767] env[63538]: _type = "Task" [ 797.159767] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.175977] env[63538]: DEBUG nova.compute.provider_tree [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.182987] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ea4e10-555e-1644-c6c5-1f2469d03af3, 'name': SearchDatastore_Task, 'duration_secs': 0.011688} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.183281] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.183537] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] d5d557c6-3d4e-4122-8756-218c9757fa01/d5d557c6-3d4e-4122-8756-218c9757fa01.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 797.184139] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62c0773a-5052-42d0-8e48-003382692f57 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.191541] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 797.191541] env[63538]: value = "task-5100918" [ 797.191541] env[63538]: _type = "Task" [ 797.191541] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.200813] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100918, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.275100] env[63538]: DEBUG nova.network.neutron [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Successfully created port: 3700c423-faa4-4788-a282-65acd1bbfe9d {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 797.298821] env[63538]: DEBUG oslo_concurrency.lockutils [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.400197] env[63538]: DEBUG nova.network.neutron [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 797.448056] env[63538]: DEBUG nova.compute.manager [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 797.543423] env[63538]: DEBUG nova.virt.hardware [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 797.543789] env[63538]: DEBUG nova.virt.hardware [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 797.544107] env[63538]: DEBUG nova.virt.hardware [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.544252] env[63538]: DEBUG nova.virt.hardware [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 797.544420] env[63538]: DEBUG nova.virt.hardware [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.544568] env[63538]: DEBUG nova.virt.hardware [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 797.544781] env[63538]: DEBUG nova.virt.hardware [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 797.544963] env[63538]: DEBUG 
nova.virt.hardware [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 797.545199] env[63538]: DEBUG nova.virt.hardware [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 797.545427] env[63538]: DEBUG nova.virt.hardware [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 797.545614] env[63538]: DEBUG nova.virt.hardware [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 797.546669] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b145a689-b6a1-4cd0-853f-4503c238c801 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.560206] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75d08ed-3719-4f76-bc3a-d707633d41c9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.681273] env[63538]: DEBUG nova.scheduler.client.report [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 797.686914] env[63538]: DEBUG nova.network.neutron [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Updating instance_info_cache with network_info: [{"id": "14cf1960-9e0d-41c9-b9c1-44ff70d859e3", "address": "fa:16:3e:f4:3d:a9", "network": {"id": "d545c161-1db4-4c9a-b508-9f7be57bf88c", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-313207436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f250802c58b48ce8b9fb27eae120f56", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14cf1960-9e", "ovs_interfaceid": "14cf1960-9e0d-41c9-b9c1-44ff70d859e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.704372] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100918, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.190358] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.785s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.190897] env[63538]: DEBUG nova.compute.manager [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 798.194075] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Releasing lock "refresh_cache-f703cd1c-4b77-4a85-a91b-63a2bd0e84a9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.194441] env[63538]: DEBUG nova.compute.manager [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Instance network_info: |[{"id": "14cf1960-9e0d-41c9-b9c1-44ff70d859e3", "address": "fa:16:3e:f4:3d:a9", "network": {"id": "d545c161-1db4-4c9a-b508-9f7be57bf88c", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-313207436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f250802c58b48ce8b9fb27eae120f56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14cf1960-9e", "ovs_interfaceid": "14cf1960-9e0d-41c9-b9c1-44ff70d859e3", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 798.194977] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.623s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.196538] env[63538]: INFO nova.compute.claims [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.199595] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:3d:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14cf1960-9e0d-41c9-b9c1-44ff70d859e3', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 798.208105] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Creating folder: Project (3f250802c58b48ce8b9fb27eae120f56). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 798.211965] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d318eb6-99d0-417c-b637-f49b94feb3cf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.220940] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100918, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.225041] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Created folder: Project (3f250802c58b48ce8b9fb27eae120f56) in parent group-v992234. [ 798.225553] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Creating folder: Instances. Parent ref: group-v992379. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 798.226013] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8546766-2013-48a8-9be7-df70c56c568d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.237315] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Created folder: Instances in parent group-v992379. [ 798.237669] env[63538]: DEBUG oslo.service.loopingcall [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 798.237938] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 798.238236] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f518595-8385-4a71-95e8-b981fb9b2fbd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.260846] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 798.260846] env[63538]: value = "task-5100921" [ 798.260846] env[63538]: _type = "Task" [ 798.260846] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.272529] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100921, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.299182] env[63538]: DEBUG nova.compute.manager [req-c6d71ada-4b11-4f57-ad2d-3d6c0f7a0496 req-86038c62-1e0f-490d-8316-1a430817875f service nova] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Received event network-changed-14cf1960-9e0d-41c9-b9c1-44ff70d859e3 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 798.299435] env[63538]: DEBUG nova.compute.manager [req-c6d71ada-4b11-4f57-ad2d-3d6c0f7a0496 req-86038c62-1e0f-490d-8316-1a430817875f service nova] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Refreshing instance network info cache due to event network-changed-14cf1960-9e0d-41c9-b9c1-44ff70d859e3. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 798.299693] env[63538]: DEBUG oslo_concurrency.lockutils [req-c6d71ada-4b11-4f57-ad2d-3d6c0f7a0496 req-86038c62-1e0f-490d-8316-1a430817875f service nova] Acquiring lock "refresh_cache-f703cd1c-4b77-4a85-a91b-63a2bd0e84a9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.299867] env[63538]: DEBUG oslo_concurrency.lockutils [req-c6d71ada-4b11-4f57-ad2d-3d6c0f7a0496 req-86038c62-1e0f-490d-8316-1a430817875f service nova] Acquired lock "refresh_cache-f703cd1c-4b77-4a85-a91b-63a2bd0e84a9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.300062] env[63538]: DEBUG nova.network.neutron [req-c6d71ada-4b11-4f57-ad2d-3d6c0f7a0496 req-86038c62-1e0f-490d-8316-1a430817875f service nova] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Refreshing network info cache for port 14cf1960-9e0d-41c9-b9c1-44ff70d859e3 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 798.465711] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.466136] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.466475] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.466735] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.466931] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.469995] env[63538]: INFO 
nova.compute.manager [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Terminating instance [ 798.472251] env[63538]: DEBUG nova.compute.manager [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 798.472499] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 798.473439] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dda7244-470e-49be-8851-c46ee711b87f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.481504] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 798.481791] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2be62b0-d53b-4858-9dd6-50145a2e3108 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.489055] env[63538]: DEBUG oslo_vmware.api [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 798.489055] env[63538]: value = "task-5100922" [ 798.489055] env[63538]: _type = "Task" [ 798.489055] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.500186] env[63538]: DEBUG oslo_vmware.api [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100922, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.697756] env[63538]: DEBUG nova.compute.utils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 798.699031] env[63538]: DEBUG nova.compute.manager [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 798.699172] env[63538]: DEBUG nova.network.neutron [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 798.713073] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100918, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.773298] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100921, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.783483] env[63538]: DEBUG nova.policy [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3038195cd7349769337b89497afcd55', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c423db834f544fe91eec4d04dec5a0b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 799.000217] env[63538]: DEBUG oslo_vmware.api [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100922, 'name': PowerOffVM_Task, 'duration_secs': 0.246689} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.000548] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 799.000732] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 799.000997] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe3b3367-421f-4020-98c8-8266b011b9fe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.089818] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 799.090178] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 799.090373] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Deleting the datastore file [datastore1] 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 799.090665] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27d549f0-05dc-4e81-9927-063c51af675a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.099305] env[63538]: DEBUG oslo_vmware.api [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for the task: (returnval){ [ 799.099305] env[63538]: value = "task-5100924" [ 799.099305] env[63538]: _type = "Task" [ 799.099305] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.113202] env[63538]: DEBUG oslo_vmware.api [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100924, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.198085] env[63538]: DEBUG nova.network.neutron [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Successfully created port: 90b87b42-1aac-4697-91e2-84193dd93b89 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.204663] env[63538]: DEBUG nova.compute.manager [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 799.206844] env[63538]: DEBUG nova.network.neutron [req-c6d71ada-4b11-4f57-ad2d-3d6c0f7a0496 req-86038c62-1e0f-490d-8316-1a430817875f service nova] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Updated VIF entry in instance network info cache for port 14cf1960-9e0d-41c9-b9c1-44ff70d859e3. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 799.207209] env[63538]: DEBUG nova.network.neutron [req-c6d71ada-4b11-4f57-ad2d-3d6c0f7a0496 req-86038c62-1e0f-490d-8316-1a430817875f service nova] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Updating instance_info_cache with network_info: [{"id": "14cf1960-9e0d-41c9-b9c1-44ff70d859e3", "address": "fa:16:3e:f4:3d:a9", "network": {"id": "d545c161-1db4-4c9a-b508-9f7be57bf88c", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-313207436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f250802c58b48ce8b9fb27eae120f56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14cf1960-9e", "ovs_interfaceid": "14cf1960-9e0d-41c9-b9c1-44ff70d859e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.221605] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100918, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.583843} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.222609] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] d5d557c6-3d4e-4122-8756-218c9757fa01/d5d557c6-3d4e-4122-8756-218c9757fa01.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 799.223472] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 799.225239] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6caf61f-6739-4fe8-984b-a02104e95e23 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.232984] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 799.232984] env[63538]: value = "task-5100925" [ 799.232984] env[63538]: _type = "Task" [ 799.232984] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.243969] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100925, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.274940] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100921, 'name': CreateVM_Task, 'duration_secs': 0.852146} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.275415] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 799.276444] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.278514] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.279029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 799.280273] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e5fb737-d75f-4b7c-8484-f9e23b66fb88 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.293520] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Waiting for the task: (returnval){ [ 799.293520] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52359e54-777c-5458-15be-edbb554ac15a" [ 799.293520] env[63538]: _type = "Task" [ 799.293520] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.304910] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52359e54-777c-5458-15be-edbb554ac15a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.322367] env[63538]: DEBUG nova.compute.manager [req-9d161dab-f9f5-41c3-8609-4059a94e23e2 req-f6e90ed3-b4b4-4fb1-968b-4fcaeaec2125 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Received event network-vif-plugged-377fafa3-5b12-4619-8d84-bf0b09188cd6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 799.323226] env[63538]: DEBUG oslo_concurrency.lockutils [req-9d161dab-f9f5-41c3-8609-4059a94e23e2 req-f6e90ed3-b4b4-4fb1-968b-4fcaeaec2125 service nova] Acquiring lock "6850191a-4190-4795-ae18-830b41a76085-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.323545] env[63538]: DEBUG oslo_concurrency.lockutils [req-9d161dab-f9f5-41c3-8609-4059a94e23e2 req-f6e90ed3-b4b4-4fb1-968b-4fcaeaec2125 service nova] Lock "6850191a-4190-4795-ae18-830b41a76085-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.323779] env[63538]: DEBUG oslo_concurrency.lockutils [req-9d161dab-f9f5-41c3-8609-4059a94e23e2 req-f6e90ed3-b4b4-4fb1-968b-4fcaeaec2125 service nova] Lock "6850191a-4190-4795-ae18-830b41a76085-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.324175] env[63538]: DEBUG nova.compute.manager [req-9d161dab-f9f5-41c3-8609-4059a94e23e2 req-f6e90ed3-b4b4-4fb1-968b-4fcaeaec2125 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] No waiting events found dispatching network-vif-plugged-377fafa3-5b12-4619-8d84-bf0b09188cd6 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 799.324310] env[63538]: WARNING nova.compute.manager [req-9d161dab-f9f5-41c3-8609-4059a94e23e2 req-f6e90ed3-b4b4-4fb1-968b-4fcaeaec2125 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Received unexpected event network-vif-plugged-377fafa3-5b12-4619-8d84-bf0b09188cd6 for instance with vm_state building and task_state spawning. [ 799.423472] env[63538]: DEBUG nova.network.neutron [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Successfully updated port: 377fafa3-5b12-4619-8d84-bf0b09188cd6 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 799.609535] env[63538]: DEBUG oslo_vmware.api [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Task: {'id': task-5100924, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.483209} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.609821] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 799.610173] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 799.610394] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 799.610500] env[63538]: INFO nova.compute.manager [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 799.610757] env[63538]: DEBUG oslo.service.loopingcall [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 799.615831] env[63538]: DEBUG nova.compute.manager [-] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 799.615831] env[63538]: DEBUG nova.network.neutron [-] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 799.713304] env[63538]: DEBUG oslo_concurrency.lockutils [req-c6d71ada-4b11-4f57-ad2d-3d6c0f7a0496 req-86038c62-1e0f-490d-8316-1a430817875f service nova] Releasing lock "refresh_cache-f703cd1c-4b77-4a85-a91b-63a2bd0e84a9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.743627] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100925, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070151} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.746470] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 799.748142] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd2c7b3-9e2d-437d-89f8-c52cc5813be3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.771342] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] d5d557c6-3d4e-4122-8756-218c9757fa01/d5d557c6-3d4e-4122-8756-218c9757fa01.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 799.775171] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ba8e190-bde8-40f5-95fd-867bf7f2bb0c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.800151] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 799.800151] env[63538]: value = "task-5100926" [ 799.800151] env[63538]: _type = "Task" [ 799.800151] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.809179] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52359e54-777c-5458-15be-edbb554ac15a, 'name': SearchDatastore_Task, 'duration_secs': 0.012783} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.810026] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db34bc0d-60af-4624-8956-96d18f01d319 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.813083] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.814073] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 799.814372] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.814846] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.815135] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 799.819475] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba77ae6b-dd48-481a-8249-7ea67eec83d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.822530] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100926, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.831850] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ead49a-68d3-48a8-9e45-ab0102eb6b35 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.836186] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 799.836459] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 799.866515] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d996becb-97b9-40db-aa54-9fc293fa238e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.870549] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3c6aa6-fee1-4db0-96ec-93a71f9a0c50 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.880756] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Waiting for the task: (returnval){ [ 799.880756] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cb495a-2953-f41d-a3da-80ad4849a777" [ 799.880756] env[63538]: _type = "Task" [ 799.880756] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.883566] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5dd3f8-0acc-4d74-9720-feb047405438 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.893838] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cb495a-2953-f41d-a3da-80ad4849a777, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.902454] env[63538]: DEBUG nova.compute.provider_tree [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.215972] env[63538]: DEBUG nova.compute.manager [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 800.248823] env[63538]: DEBUG nova.virt.hardware [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 800.249103] env[63538]: DEBUG nova.virt.hardware [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 800.249278] env[63538]: DEBUG nova.virt.hardware [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.249485] env[63538]: DEBUG nova.virt.hardware [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 800.250364] env[63538]: DEBUG nova.virt.hardware [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.250619] env[63538]: DEBUG nova.virt.hardware [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 800.250856] env[63538]: DEBUG nova.virt.hardware [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 800.251252] env[63538]: DEBUG nova.virt.hardware [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 800.251527] env[63538]: DEBUG nova.virt.hardware [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 800.251841] env[63538]: DEBUG nova.virt.hardware [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 800.252063] env[63538]: DEBUG nova.virt.hardware [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 800.252987] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5adfde-261b-4565-9848-567983b32b30 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.262414] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb87cb8c-5900-4a0e-b71d-b87eaac73395 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.310659] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100926, 'name': ReconfigVM_Task, 'duration_secs': 0.407087} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.310977] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Reconfigured VM instance instance-00000034 to attach disk [datastore2] d5d557c6-3d4e-4122-8756-218c9757fa01/d5d557c6-3d4e-4122-8756-218c9757fa01.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 800.311672] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-505fccf4-afe1-42a9-b5b1-242e3410daa9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.319104] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 800.319104] env[63538]: value = "task-5100927" [ 800.319104] env[63538]: _type = "Task" [ 800.319104] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.329740] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100927, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.391134] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cb495a-2953-f41d-a3da-80ad4849a777, 'name': SearchDatastore_Task, 'duration_secs': 0.016923} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.392088] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cba06484-50b8-4eea-bd4d-da337f57493e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.398980] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Waiting for the task: (returnval){ [ 800.398980] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529cb37f-e629-e35a-26d9-a7f3524c7971" [ 800.398980] env[63538]: _type = "Task" [ 800.398980] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.409846] env[63538]: DEBUG nova.scheduler.client.report [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 800.413354] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529cb37f-e629-e35a-26d9-a7f3524c7971, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.831121] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100927, 'name': Rename_Task, 'duration_secs': 0.144595} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.831450] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 800.831835] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36131096-e948-40fb-bd5a-37c605395a9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.839484] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 800.839484] env[63538]: value = "task-5100928" [ 800.839484] env[63538]: _type = "Task" [ 800.839484] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.851326] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100928, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.909603] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529cb37f-e629-e35a-26d9-a7f3524c7971, 'name': SearchDatastore_Task, 'duration_secs': 0.010235} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.910183] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.910285] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] f703cd1c-4b77-4a85-a91b-63a2bd0e84a9/f703cd1c-4b77-4a85-a91b-63a2bd0e84a9.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 800.910787] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5ac23a9-353e-47aa-85dc-211af04d1a17 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.915668] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.720s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.915668] env[63538]: DEBUG nova.compute.manager [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 800.920545] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.251s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.921342] env[63538]: INFO nova.compute.claims [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.924493] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Waiting for the task: (returnval){ [ 800.924493] env[63538]: value = "task-5100929" [ 800.924493] env[63538]: _type = "Task" [ 800.924493] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.934637] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100929, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.038899] env[63538]: DEBUG nova.compute.manager [req-4a0c2d7d-25fa-45c8-bcfb-65664c1ecd90 req-ca5d4edd-ac8f-4378-8b79-b4cb4827754c service nova] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Received event network-vif-plugged-90b87b42-1aac-4697-91e2-84193dd93b89 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 801.039020] env[63538]: DEBUG oslo_concurrency.lockutils [req-4a0c2d7d-25fa-45c8-bcfb-65664c1ecd90 req-ca5d4edd-ac8f-4378-8b79-b4cb4827754c service nova] Acquiring lock "5bf7ed57-62d5-4abc-96d8-78b979baed92-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.039832] env[63538]: DEBUG oslo_concurrency.lockutils [req-4a0c2d7d-25fa-45c8-bcfb-65664c1ecd90 req-ca5d4edd-ac8f-4378-8b79-b4cb4827754c service nova] Lock "5bf7ed57-62d5-4abc-96d8-78b979baed92-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.040041] env[63538]: DEBUG oslo_concurrency.lockutils [req-4a0c2d7d-25fa-45c8-bcfb-65664c1ecd90 req-ca5d4edd-ac8f-4378-8b79-b4cb4827754c service nova] Lock "5bf7ed57-62d5-4abc-96d8-78b979baed92-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.040266] env[63538]: DEBUG nova.compute.manager [req-4a0c2d7d-25fa-45c8-bcfb-65664c1ecd90 req-ca5d4edd-ac8f-4378-8b79-b4cb4827754c service nova] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] No waiting events found dispatching network-vif-plugged-90b87b42-1aac-4697-91e2-84193dd93b89 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 801.040395] env[63538]: WARNING nova.compute.manager [req-4a0c2d7d-25fa-45c8-bcfb-65664c1ecd90 req-ca5d4edd-ac8f-4378-8b79-b4cb4827754c service nova] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Received unexpected event network-vif-plugged-90b87b42-1aac-4697-91e2-84193dd93b89 for instance with vm_state building and task_state spawning. 
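The entries above repeatedly show the oslo.vmware wait-for-task pattern: the driver invokes a vCenter task (e.g. task-5100927 Rename_Task, task-5100928 PowerOnVM_Task), then _poll_task logs "progress is N%" until the task succeeds and the duration is reported as "completed successfully". The Python below is only a minimal, self-contained sketch of that poll-until-done loop, not the oslo.vmware implementation; get_task_info() is a hypothetical stand-in for the vSphere call that returns task state and progress.

import logging
import time

LOG = logging.getLogger(__name__)

# Hypothetical stand-in for the vSphere API lookup that returns the task's
# current state ('running', 'success', 'error') and its progress percentage.
def get_task_info(task_ref):
    raise NotImplementedError("replace with a real task-info lookup")

def wait_for_task(task_ref, poll_interval=0.5):
    """Poll a vCenter-style task until it finishes, logging progress as above."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)
        state = info.get("state")
        if state == "success":
            LOG.debug("Task %s completed successfully in %.3fs",
                      task_ref, time.monotonic() - start)
            return info
        if state == "error":
            raise RuntimeError("Task %s failed: %s" % (task_ref, info.get("error")))
        LOG.debug("Task %s progress is %s%%.", task_ref, info.get("progress", 0))
        time.sleep(poll_interval)

In the log, each "Waiting for the task: (returnval){ value = task-51009xx }" line marks the start of such a loop, and the matching "{'duration_secs': ...} completed successfully" line marks its exit.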
[ 801.111088] env[63538]: DEBUG nova.network.neutron [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Successfully updated port: 90b87b42-1aac-4697-91e2-84193dd93b89 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 801.174098] env[63538]: DEBUG nova.network.neutron [-] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.351466] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100928, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.429176] env[63538]: DEBUG nova.compute.utils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 801.431054] env[63538]: DEBUG nova.compute.manager [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 801.431280] env[63538]: DEBUG nova.network.neutron [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 801.445225] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100929, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.465614] env[63538]: DEBUG nova.compute.manager [req-cfd38667-39d7-4120-b0d8-998b8367d6f3 req-a4fae4a0-ed6b-4d6d-99ef-b7df9e8fff98 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Received event network-changed-377fafa3-5b12-4619-8d84-bf0b09188cd6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 801.465614] env[63538]: DEBUG nova.compute.manager [req-cfd38667-39d7-4120-b0d8-998b8367d6f3 req-a4fae4a0-ed6b-4d6d-99ef-b7df9e8fff98 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Refreshing instance network info cache due to event network-changed-377fafa3-5b12-4619-8d84-bf0b09188cd6. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 801.465614] env[63538]: DEBUG oslo_concurrency.lockutils [req-cfd38667-39d7-4120-b0d8-998b8367d6f3 req-a4fae4a0-ed6b-4d6d-99ef-b7df9e8fff98 service nova] Acquiring lock "refresh_cache-6850191a-4190-4795-ae18-830b41a76085" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.465614] env[63538]: DEBUG oslo_concurrency.lockutils [req-cfd38667-39d7-4120-b0d8-998b8367d6f3 req-a4fae4a0-ed6b-4d6d-99ef-b7df9e8fff98 service nova] Acquired lock "refresh_cache-6850191a-4190-4795-ae18-830b41a76085" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.465796] env[63538]: DEBUG nova.network.neutron [req-cfd38667-39d7-4120-b0d8-998b8367d6f3 req-a4fae4a0-ed6b-4d6d-99ef-b7df9e8fff98 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Refreshing network info cache for port 377fafa3-5b12-4619-8d84-bf0b09188cd6 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 801.481614] env[63538]: DEBUG nova.policy [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '884364daa8f746fb9b32a8b33c9e2cfd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea05f3fb4676466bb2a286f5a2fefb8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 801.613918] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Acquiring lock "refresh_cache-5bf7ed57-62d5-4abc-96d8-78b979baed92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.614126] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Acquired lock "refresh_cache-5bf7ed57-62d5-4abc-96d8-78b979baed92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.614282] env[63538]: DEBUG nova.network.neutron [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 801.679321] env[63538]: INFO nova.compute.manager [-] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Took 2.06 seconds to deallocate network for instance. 
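The "refresh_cache-<instance-uuid>" acquire/release pairs above bracket the rebuild of an instance's network-info cache while Neutron is queried for its ports. The sketch below illustrates that pattern only; it substitutes a plain per-name threading lock for oslo.concurrency's lockutils, and fetch_network_info() is a hypothetical placeholder for the Neutron port lookup.

import logging
import threading
from collections import defaultdict

LOG = logging.getLogger(__name__)

# One lock per "refresh_cache-<instance-uuid>" name, standing in for the
# oslo.concurrency locks seen in the log entries above.
_locks = defaultdict(threading.Lock)
_info_cache = {}

# Hypothetical placeholder for the Neutron API call that returns the
# instance's current VIF/port information.
def fetch_network_info(instance_uuid):
    raise NotImplementedError("replace with a real Neutron lookup")

def refresh_instance_nw_cache(instance_uuid):
    """Rebuild the cached network_info for one instance under its lock."""
    lock_name = "refresh_cache-%s" % instance_uuid
    LOG.debug('Acquiring lock "%s"', lock_name)
    with _locks[lock_name]:
        LOG.debug('Acquired lock "%s"', lock_name)
        network_info = fetch_network_info(instance_uuid)
        _info_cache[instance_uuid] = network_info
        LOG.debug("Updating instance_info_cache with network_info: %s",
                  network_info)
    LOG.debug('Released lock "%s"', lock_name)
    return _info_cache[instance_uuid]

The per-instance lock is why the log shows a strict Acquiring / Acquired / Updating instance_info_cache / Releasing sequence for each port event that triggers a cache refresh.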
[ 801.776818] env[63538]: DEBUG nova.network.neutron [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Successfully created port: 384175ce-d150-4f04-ad8f-d65790b79e5e {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 801.857609] env[63538]: DEBUG oslo_vmware.api [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5100928, 'name': PowerOnVM_Task, 'duration_secs': 0.724614} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.857880] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 801.858156] env[63538]: INFO nova.compute.manager [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Took 9.70 seconds to spawn the instance on the hypervisor. [ 801.858360] env[63538]: DEBUG nova.compute.manager [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 801.862246] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101c3f4b-c4c4-47a1-b6ab-762599b152a6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.947821] env[63538]: DEBUG nova.compute.manager [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 801.965433] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100929, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530725} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.965695] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] f703cd1c-4b77-4a85-a91b-63a2bd0e84a9/f703cd1c-4b77-4a85-a91b-63a2bd0e84a9.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 801.965921] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 801.966210] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75d16d9e-fb2f-4a1d-9ffa-845d72bf3997 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.979311] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Waiting for the task: (returnval){ [ 801.979311] env[63538]: value = "task-5100930" [ 801.979311] env[63538]: _type = "Task" [ 801.979311] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.992735] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100930, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.028547] env[63538]: DEBUG nova.network.neutron [req-cfd38667-39d7-4120-b0d8-998b8367d6f3 req-a4fae4a0-ed6b-4d6d-99ef-b7df9e8fff98 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 802.190225] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.196594] env[63538]: DEBUG nova.network.neutron [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Successfully updated port: 831b08d0-41bf-439d-8e03-090fc08a5815 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 802.204302] env[63538]: DEBUG nova.network.neutron [req-cfd38667-39d7-4120-b0d8-998b8367d6f3 req-a4fae4a0-ed6b-4d6d-99ef-b7df9e8fff98 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.350830] env[63538]: DEBUG nova.network.neutron [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 802.380761] env[63538]: INFO nova.compute.manager [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Took 53.59 seconds to build instance. [ 802.497302] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100930, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076714} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.498215] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 802.500481] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a69599-75e6-42f4-b859-9b3dc7ccee2f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.538644] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] f703cd1c-4b77-4a85-a91b-63a2bd0e84a9/f703cd1c-4b77-4a85-a91b-63a2bd0e84a9.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 802.543871] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be22899c-1ce5-49c9-b5c3-22e57f269b62 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.565799] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Waiting for the task: (returnval){ [ 802.565799] env[63538]: value = "task-5100931" [ 802.565799] env[63538]: _type = "Task" [ 802.565799] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.574772] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100931, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.618018] env[63538]: DEBUG nova.network.neutron [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Updating instance_info_cache with network_info: [{"id": "90b87b42-1aac-4697-91e2-84193dd93b89", "address": "fa:16:3e:1c:46:57", "network": {"id": "0d94690c-811c-4601-ba43-b1ed296c0a4d", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1543191410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c423db834f544fe91eec4d04dec5a0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90b87b42-1a", "ovs_interfaceid": "90b87b42-1aac-4697-91e2-84193dd93b89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.666655] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c0d385d-c8f6-41df-9d9c-0fcf698007d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.676172] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812675bb-cab5-46a5-b562-b61d3c25d969 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.711256] env[63538]: DEBUG oslo_concurrency.lockutils [req-cfd38667-39d7-4120-b0d8-998b8367d6f3 req-a4fae4a0-ed6b-4d6d-99ef-b7df9e8fff98 service nova] Releasing lock "refresh_cache-6850191a-4190-4795-ae18-830b41a76085" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.711522] env[63538]: DEBUG nova.compute.manager [req-cfd38667-39d7-4120-b0d8-998b8367d6f3 req-a4fae4a0-ed6b-4d6d-99ef-b7df9e8fff98 service nova] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Received event network-vif-deleted-6d45b11c-cd3e-4589-9931-5ffdbbc4e193 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 802.712779] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f32caf-1c99-4dac-9369-e597273912b4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.721625] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7766dc-24f4-42ac-b935-02492cf24086 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.736285] env[63538]: DEBUG nova.compute.provider_tree [None 
req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.885282] env[63538]: DEBUG oslo_concurrency.lockutils [None req-448c311d-d2c5-4fab-a9da-d415f0bcbf15 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.906s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.960163] env[63538]: DEBUG nova.compute.manager [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 802.990588] env[63538]: DEBUG nova.virt.hardware [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 802.991081] env[63538]: DEBUG nova.virt.hardware [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 802.991303] env[63538]: DEBUG nova.virt.hardware [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 802.991568] env[63538]: DEBUG nova.virt.hardware [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 802.991766] env[63538]: DEBUG nova.virt.hardware [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 802.991991] env[63538]: DEBUG nova.virt.hardware [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 
tempest-ServersTestJSON-148199479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 802.992283] env[63538]: DEBUG nova.virt.hardware [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 802.992496] env[63538]: DEBUG nova.virt.hardware [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 802.992718] env[63538]: DEBUG nova.virt.hardware [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 802.992936] env[63538]: DEBUG nova.virt.hardware [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 802.993192] env[63538]: DEBUG nova.virt.hardware [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 802.994338] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e948a76-2cf9-4379-a0d8-5906fd6411b3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.005160] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872666e9-c3e1-4625-8d86-d8cf325ba2b4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.078745] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100931, 'name': ReconfigVM_Task, 'duration_secs': 0.337172} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.078745] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Reconfigured VM instance instance-00000035 to attach disk [datastore1] f703cd1c-4b77-4a85-a91b-63a2bd0e84a9/f703cd1c-4b77-4a85-a91b-63a2bd0e84a9.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 803.078745] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3f57efc-9259-4742-ae1f-cd1462c08895 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.087023] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Waiting for the task: (returnval){ [ 803.087023] env[63538]: value = "task-5100932" [ 803.087023] env[63538]: _type = "Task" [ 803.087023] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.097048] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100932, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.124026] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Releasing lock "refresh_cache-5bf7ed57-62d5-4abc-96d8-78b979baed92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.124026] env[63538]: DEBUG nova.compute.manager [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Instance network_info: |[{"id": "90b87b42-1aac-4697-91e2-84193dd93b89", "address": "fa:16:3e:1c:46:57", "network": {"id": "0d94690c-811c-4601-ba43-b1ed296c0a4d", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1543191410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c423db834f544fe91eec4d04dec5a0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90b87b42-1a", "ovs_interfaceid": "90b87b42-1aac-4697-91e2-84193dd93b89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 803.124026] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:46:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ee9f433-666e-4d74-96df-c7c7a6ac7fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90b87b42-1aac-4697-91e2-84193dd93b89', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 803.133210] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Creating folder: Project (2c423db834f544fe91eec4d04dec5a0b). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 803.134596] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33a786b7-a6da-4aac-8235-df9ba5bcb703 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.140158] env[63538]: DEBUG nova.compute.manager [req-66c200df-4096-4371-9f2b-8f6f954ea9df req-b697d232-2404-425e-a9f4-e02f006a983c service nova] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Received event network-changed-90b87b42-1aac-4697-91e2-84193dd93b89 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 803.141018] env[63538]: DEBUG nova.compute.manager [req-66c200df-4096-4371-9f2b-8f6f954ea9df req-b697d232-2404-425e-a9f4-e02f006a983c service nova] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Refreshing instance network info cache due to event network-changed-90b87b42-1aac-4697-91e2-84193dd93b89. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 803.141018] env[63538]: DEBUG oslo_concurrency.lockutils [req-66c200df-4096-4371-9f2b-8f6f954ea9df req-b697d232-2404-425e-a9f4-e02f006a983c service nova] Acquiring lock "refresh_cache-5bf7ed57-62d5-4abc-96d8-78b979baed92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.141018] env[63538]: DEBUG oslo_concurrency.lockutils [req-66c200df-4096-4371-9f2b-8f6f954ea9df req-b697d232-2404-425e-a9f4-e02f006a983c service nova] Acquired lock "refresh_cache-5bf7ed57-62d5-4abc-96d8-78b979baed92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.143500] env[63538]: DEBUG nova.network.neutron [req-66c200df-4096-4371-9f2b-8f6f954ea9df req-b697d232-2404-425e-a9f4-e02f006a983c service nova] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Refreshing network info cache for port 90b87b42-1aac-4697-91e2-84193dd93b89 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 803.149239] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Created folder: Project (2c423db834f544fe91eec4d04dec5a0b) in parent group-v992234. 
[ 803.149239] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Creating folder: Instances. Parent ref: group-v992382. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 803.149239] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8aa594cb-61e1-40b2-9da5-4372f5d0b3ed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.163977] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Created folder: Instances in parent group-v992382. [ 803.164404] env[63538]: DEBUG oslo.service.loopingcall [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 803.164677] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 803.167039] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8944d7e-d7f3-45a7-9ed3-a13bb8d06e1a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.187525] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 803.187525] env[63538]: value = "task-5100935" [ 803.187525] env[63538]: _type = "Task" [ 803.187525] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.199065] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100935, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.239511] env[63538]: DEBUG nova.scheduler.client.report [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 803.390050] env[63538]: DEBUG nova.compute.manager [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 803.537598] env[63538]: DEBUG nova.compute.manager [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Received event network-vif-plugged-831b08d0-41bf-439d-8e03-090fc08a5815 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 803.538168] env[63538]: DEBUG oslo_concurrency.lockutils [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] Acquiring lock "6850191a-4190-4795-ae18-830b41a76085-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.538413] env[63538]: DEBUG oslo_concurrency.lockutils [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] Lock "6850191a-4190-4795-ae18-830b41a76085-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.538667] env[63538]: DEBUG oslo_concurrency.lockutils [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] Lock "6850191a-4190-4795-ae18-830b41a76085-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.539450] env[63538]: DEBUG nova.compute.manager [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] No waiting events found dispatching network-vif-plugged-831b08d0-41bf-439d-8e03-090fc08a5815 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 803.539712] env[63538]: WARNING nova.compute.manager [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Received unexpected event network-vif-plugged-831b08d0-41bf-439d-8e03-090fc08a5815 for instance with vm_state building and task_state spawning. [ 803.540107] env[63538]: DEBUG nova.compute.manager [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Received event network-changed-831b08d0-41bf-439d-8e03-090fc08a5815 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 803.540535] env[63538]: DEBUG nova.compute.manager [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Refreshing instance network info cache due to event network-changed-831b08d0-41bf-439d-8e03-090fc08a5815. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 803.540982] env[63538]: DEBUG oslo_concurrency.lockutils [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] Acquiring lock "refresh_cache-6850191a-4190-4795-ae18-830b41a76085" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.540982] env[63538]: DEBUG oslo_concurrency.lockutils [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] Acquired lock "refresh_cache-6850191a-4190-4795-ae18-830b41a76085" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.541199] env[63538]: DEBUG nova.network.neutron [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Refreshing network info cache for port 831b08d0-41bf-439d-8e03-090fc08a5815 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 803.596037] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100932, 'name': Rename_Task, 'duration_secs': 0.25873} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.596716] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 803.596716] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81974c3d-1d7c-4d20-a68a-e895485bb9be {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.606829] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Waiting for the task: (returnval){ [ 803.606829] env[63538]: value = "task-5100936" [ 803.606829] env[63538]: _type = "Task" [ 803.606829] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.619187] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100936, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.700355] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100935, 'name': CreateVM_Task, 'duration_secs': 0.318818} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.700658] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 803.701390] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.701745] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.702131] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 803.702349] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08546c1d-1462-4e06-ac00-29ee04448944 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.708047] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Waiting for the task: (returnval){ [ 803.708047] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520e61e1-b8bb-d34b-1551-0ad862f162c6" [ 803.708047] env[63538]: _type = "Task" [ 803.708047] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.719074] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520e61e1-b8bb-d34b-1551-0ad862f162c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.745894] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.826s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.746537] env[63538]: DEBUG nova.compute.manager [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 803.749395] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.651s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.750996] env[63538]: INFO nova.compute.claims [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 803.922566] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.931787] env[63538]: DEBUG nova.network.neutron [req-66c200df-4096-4371-9f2b-8f6f954ea9df req-b697d232-2404-425e-a9f4-e02f006a983c service nova] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Updated VIF entry in instance network info cache for port 90b87b42-1aac-4697-91e2-84193dd93b89. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 803.931787] env[63538]: DEBUG nova.network.neutron [req-66c200df-4096-4371-9f2b-8f6f954ea9df req-b697d232-2404-425e-a9f4-e02f006a983c service nova] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Updating instance_info_cache with network_info: [{"id": "90b87b42-1aac-4697-91e2-84193dd93b89", "address": "fa:16:3e:1c:46:57", "network": {"id": "0d94690c-811c-4601-ba43-b1ed296c0a4d", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1543191410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c423db834f544fe91eec4d04dec5a0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90b87b42-1a", "ovs_interfaceid": "90b87b42-1aac-4697-91e2-84193dd93b89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.016845] env[63538]: DEBUG nova.network.neutron [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Successfully updated port: 384175ce-d150-4f04-ad8f-d65790b79e5e {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 804.098430] env[63538]: DEBUG nova.network.neutron [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.120327] env[63538]: DEBUG oslo_vmware.api [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100936, 'name': PowerOnVM_Task, 'duration_secs': 0.456371} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.120706] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 804.120976] env[63538]: INFO nova.compute.manager [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Took 8.24 seconds to spawn the instance on the hypervisor. 
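The instance_info_cache updates above repeat the same network_info structure several times. As a reading aid, the snippet below walks an abridged copy of that structure (values taken verbatim from the entries for port 90b87b42-1aac-4697-91e2-84193dd93b89) and pulls out the fields the rest of the trace refers to: port id, MAC, devname, NSX segmentation id, and the fixed IP. It is plain Python over the logged data, not Nova code.

# Abridged copy of the network_info blob logged above.
network_info = [{
    "id": "90b87b42-1aac-4697-91e2-84193dd93b89",
    "address": "fa:16:3e:1c:46:57",
    "devname": "tap90b87b42-1a",
    "details": {
        "segmentation_id": 499,
        "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda",
    },
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.11", "type": "fixed"}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["devname"],
          vif["details"]["segmentation_id"], fixed_ips)
# -> 90b87b42-... fa:16:3e:1c:46:57 tap90b87b42-1a 499 ['192.168.128.11']
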
[ 804.121240] env[63538]: DEBUG nova.compute.manager [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 804.122253] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb367d9-1c83-4897-a912-fa2049d3e79e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.222343] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520e61e1-b8bb-d34b-1551-0ad862f162c6, 'name': SearchDatastore_Task, 'duration_secs': 0.011708} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.222872] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.223304] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 804.223712] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.224051] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.224363] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 804.224784] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd83b20e-326c-459b-908b-459266a255ab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.236846] env[63538]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 804.237456] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 804.237848] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ca5622d-025a-4efc-bdaa-5a1a6ee96745 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.244651] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Waiting for the task: (returnval){ [ 804.244651] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525364c3-4aaa-1c84-8531-024577fe7d80" [ 804.244651] env[63538]: _type = "Task" [ 804.244651] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.246019] env[63538]: DEBUG nova.network.neutron [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.260973] env[63538]: DEBUG nova.compute.utils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 804.262424] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525364c3-4aaa-1c84-8531-024577fe7d80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.264115] env[63538]: DEBUG nova.compute.manager [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 804.264115] env[63538]: DEBUG nova.network.neutron [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 804.337902] env[63538]: DEBUG nova.policy [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e7850362eef47f1b623b6e004d60ade', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '55edcd65da7b4a569a4c27aab4819cde', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 804.434369] env[63538]: DEBUG oslo_concurrency.lockutils [req-66c200df-4096-4371-9f2b-8f6f954ea9df req-b697d232-2404-425e-a9f4-e02f006a983c service nova] Releasing lock "refresh_cache-5bf7ed57-62d5-4abc-96d8-78b979baed92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.519721] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "refresh_cache-79f4cdd9-219a-4440-9dd2-9b2a360965b1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.520275] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "refresh_cache-79f4cdd9-219a-4440-9dd2-9b2a360965b1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.520275] env[63538]: DEBUG nova.network.neutron [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 804.640926] env[63538]: INFO nova.compute.manager [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Took 50.92 seconds to build instance. 
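The scheduler report entry above restates the inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 each time a claim is made. The headline capacity placement works from is (total - reserved) * allocation_ratio per resource class; the sketch below applies that to the exact numbers in the logged inventory. It is a simplification: min_unit, max_unit and step_size constrain individual allocations and are ignored here.

# Inventory dict as logged for provider f65218a4-1d3d-476a-9093-01cae92c8635,
# reduced to the fields used for the capacity calculation.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
}

def usable(inv):
    """Capacity available to new allocations for one resource class."""
    return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

for rc, inv in inventory.items():
    print(rc, usable(inv))
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0
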
[ 804.694207] env[63538]: DEBUG nova.network.neutron [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Successfully updated port: 3700c423-faa4-4788-a282-65acd1bbfe9d {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 804.752879] env[63538]: DEBUG oslo_concurrency.lockutils [req-1584b75f-af75-4802-a82f-21e58c56d87b req-85008986-1261-45e9-a96e-5765b1e2b1f1 service nova] Releasing lock "refresh_cache-6850191a-4190-4795-ae18-830b41a76085" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.758714] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525364c3-4aaa-1c84-8531-024577fe7d80, 'name': SearchDatastore_Task, 'duration_secs': 0.013281} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.759585] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9286313-9854-458c-ab66-d3abbb6c5fba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.767146] env[63538]: DEBUG nova.compute.manager [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 804.771543] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Waiting for the task: (returnval){ [ 804.771543] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5225fca0-cf02-ee75-677e-9fc2254fec99" [ 804.771543] env[63538]: _type = "Task" [ 804.771543] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.784713] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5225fca0-cf02-ee75-677e-9fc2254fec99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.814562] env[63538]: DEBUG nova.network.neutron [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Successfully created port: 6edf1839-d708-46ad-836d-e2ecac08730f {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 805.071829] env[63538]: DEBUG nova.network.neutron [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 805.143851] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d785e8-3707-43aa-bbcc-f5d08dde3a69 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Lock "f703cd1c-4b77-4a85-a91b-63a2bd0e84a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.286s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.203751] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "refresh_cache-6850191a-4190-4795-ae18-830b41a76085" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.203751] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquired lock "refresh_cache-6850191a-4190-4795-ae18-830b41a76085" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.203751] env[63538]: DEBUG nova.network.neutron [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 805.292130] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5225fca0-cf02-ee75-677e-9fc2254fec99, 'name': SearchDatastore_Task, 'duration_secs': 0.025514} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.292130] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.292130] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 5bf7ed57-62d5-4abc-96d8-78b979baed92/5bf7ed57-62d5-4abc-96d8-78b979baed92.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 805.292130] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07a26536-ed09-459e-ad3e-ad80d20a94b7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.300130] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Waiting for the task: (returnval){ [ 805.300130] env[63538]: value = "task-5100937" [ 805.300130] env[63538]: _type = "Task" [ 805.300130] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.316822] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100937, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.320441] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e93964-d220-4be0-9284-3c7f9a0209d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.332381] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5423b59f-229c-4ccc-b1ba-e5ad5f5c15f5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.370670] env[63538]: DEBUG nova.network.neutron [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Updating instance_info_cache with network_info: [{"id": "384175ce-d150-4f04-ad8f-d65790b79e5e", "address": "fa:16:3e:f6:00:a2", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap384175ce-d1", "ovs_interfaceid": "384175ce-d150-4f04-ad8f-d65790b79e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.374031] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf890c9-e0c7-4875-a774-85dc2481bfb0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.382161] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2195379-9bd3-4052-9ad6-46afcc997063 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.403304] env[63538]: DEBUG nova.compute.provider_tree [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.419561] env[63538]: DEBUG nova.compute.manager [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Received event network-vif-plugged-384175ce-d150-4f04-ad8f-d65790b79e5e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 805.420520] env[63538]: DEBUG oslo_concurrency.lockutils 
[req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Acquiring lock "79f4cdd9-219a-4440-9dd2-9b2a360965b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.420520] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Lock "79f4cdd9-219a-4440-9dd2-9b2a360965b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.420520] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Lock "79f4cdd9-219a-4440-9dd2-9b2a360965b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.420520] env[63538]: DEBUG nova.compute.manager [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] No waiting events found dispatching network-vif-plugged-384175ce-d150-4f04-ad8f-d65790b79e5e {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 805.420703] env[63538]: WARNING nova.compute.manager [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Received unexpected event network-vif-plugged-384175ce-d150-4f04-ad8f-d65790b79e5e for instance with vm_state building and task_state spawning. [ 805.420784] env[63538]: DEBUG nova.compute.manager [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Received event network-changed-0d48de93-8e4f-4795-a582-f00e76e60047 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 805.420946] env[63538]: DEBUG nova.compute.manager [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Refreshing instance network info cache due to event network-changed-0d48de93-8e4f-4795-a582-f00e76e60047. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 805.421219] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Acquiring lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.421431] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Acquired lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.421559] env[63538]: DEBUG nova.network.neutron [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Refreshing network info cache for port 0d48de93-8e4f-4795-a582-f00e76e60047 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 805.595474] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Acquiring lock "f703cd1c-4b77-4a85-a91b-63a2bd0e84a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.595474] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Lock "f703cd1c-4b77-4a85-a91b-63a2bd0e84a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.595474] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Acquiring lock "f703cd1c-4b77-4a85-a91b-63a2bd0e84a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.595474] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Lock "f703cd1c-4b77-4a85-a91b-63a2bd0e84a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.595474] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Lock "f703cd1c-4b77-4a85-a91b-63a2bd0e84a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.598141] env[63538]: INFO nova.compute.manager [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 
tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Terminating instance [ 805.600030] env[63538]: DEBUG nova.compute.manager [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 805.600241] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 805.601106] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024b3a08-5478-4994-8ae2-1244d2466f00 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.609640] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 805.609977] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93cb6d86-0620-4f0d-9b6c-3d41917d7b93 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.617988] env[63538]: DEBUG oslo_vmware.api [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Waiting for the task: (returnval){ [ 805.617988] env[63538]: value = "task-5100938" [ 805.617988] env[63538]: _type = "Task" [ 805.617988] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.628062] env[63538]: DEBUG oslo_vmware.api [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100938, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.647026] env[63538]: DEBUG nova.compute.manager [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 805.772188] env[63538]: DEBUG nova.network.neutron [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 805.785440] env[63538]: DEBUG nova.compute.manager [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 805.819892] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100937, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.835855] env[63538]: DEBUG nova.virt.hardware [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 805.835855] env[63538]: DEBUG nova.virt.hardware [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 805.835855] env[63538]: DEBUG nova.virt.hardware [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.835855] env[63538]: DEBUG nova.virt.hardware [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 805.835855] env[63538]: DEBUG nova.virt.hardware [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.835855] env[63538]: DEBUG nova.virt.hardware [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 805.835855] env[63538]: DEBUG nova.virt.hardware [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 805.835855] env[63538]: DEBUG nova.virt.hardware [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 805.835855] env[63538]: DEBUG nova.virt.hardware [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 805.835855] env[63538]: DEBUG nova.virt.hardware [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 805.835855] env[63538]: DEBUG nova.virt.hardware [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 805.839584] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33581c38-b8ae-4caf-a7ac-53c6a283191a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.851869] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea2724d-6130-48d1-91f4-f09d31f563ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.883164] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "refresh_cache-79f4cdd9-219a-4440-9dd2-9b2a360965b1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.883164] env[63538]: DEBUG nova.compute.manager [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Instance network_info: |[{"id": "384175ce-d150-4f04-ad8f-d65790b79e5e", "address": "fa:16:3e:f6:00:a2", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap384175ce-d1", "ovs_interfaceid": "384175ce-d150-4f04-ad8f-d65790b79e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 805.883164] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:00:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '384175ce-d150-4f04-ad8f-d65790b79e5e', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 805.890368] env[63538]: DEBUG oslo.service.loopingcall [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 805.891222] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 805.891598] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05a796a4-574a-4bae-a963-a4511ee9aad0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.909816] env[63538]: DEBUG nova.scheduler.client.report [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 805.920890] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 805.920890] env[63538]: value = "task-5100939" [ 805.920890] env[63538]: _type = "Task" [ 805.920890] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.933722] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100939, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.134331] env[63538]: DEBUG oslo_vmware.api [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100938, 'name': PowerOffVM_Task, 'duration_secs': 0.225167} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.134595] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 806.136302] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 806.136302] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e44895c8-1414-4301-93de-22fe4a97a0a6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.177065] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.232414] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 806.232727] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 806.232927] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Deleting the datastore file [datastore1] f703cd1c-4b77-4a85-a91b-63a2bd0e84a9 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 806.233250] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bbe54cc-7838-47b2-a61d-54fd543aa8ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.243164] env[63538]: DEBUG oslo_vmware.api [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Waiting for the task: (returnval){ [ 806.243164] env[63538]: value = "task-5100941" [ 806.243164] env[63538]: _type = 
"Task" [ 806.243164] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.257820] env[63538]: DEBUG oslo_vmware.api [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100941, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.308617] env[63538]: DEBUG nova.network.neutron [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Updated VIF entry in instance network info cache for port 0d48de93-8e4f-4795-a582-f00e76e60047. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 806.308617] env[63538]: DEBUG nova.network.neutron [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Updating instance_info_cache with network_info: [{"id": "0d48de93-8e4f-4795-a582-f00e76e60047", "address": "fa:16:3e:ca:bf:a1", "network": {"id": "955bc6e2-cea0-4cf9-80fb-521ae0e565f0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-726504029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9427981aac124f6aa0c4d8d45b0ae917", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c297fe21-cd0b-4226-813b-a65d2358d034", "external-id": "nsx-vlan-transportzone-98", "segmentation_id": 98, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d48de93-8e", "ovs_interfaceid": "0d48de93-8e4f-4795-a582-f00e76e60047", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.314772] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100937, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.72275} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.315581] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 5bf7ed57-62d5-4abc-96d8-78b979baed92/5bf7ed57-62d5-4abc-96d8-78b979baed92.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 806.315581] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 806.315760] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ee660ef-1e82-4ea8-ac1f-dbed9b9c81c0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.324719] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Waiting for the task: (returnval){ [ 806.324719] env[63538]: value = "task-5100942" [ 806.324719] env[63538]: _type = "Task" [ 806.324719] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.340390] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100942, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.416334] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.667s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.416872] env[63538]: DEBUG nova.compute.manager [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Start building networks asynchronously for instance. 
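
The 'Lock "compute_resources" ... released ... held 2.667s' records above come from oslo.concurrency's named locks: every resource-tracker claim on this host serializes on the same lock name, so the "waited" figures are time spent queued behind other claims and "held" is the claim itself. A minimal sketch of the two usual forms is below; the function bodies are placeholders, only the locking calls are the real oslo_concurrency API.

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def instance_claim(instance_uuid, flavor):
        # Decorator form: concurrent claims queue on the named lock, which is
        # where the "waited N s" figures in the log come from.
        print(f'claiming resources for {instance_uuid} ({flavor})')

    def refresh_net_cache(instance_uuid):
        # Context-manager form, as used for the per-instance
        # "refresh_cache-<uuid>" locks around network info cache updates.
        with lockutils.lock(f'refresh_cache-{instance_uuid}'):
            print(f'refreshing network info cache for {instance_uuid}')
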
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 806.420062] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.487s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.421477] env[63538]: INFO nova.compute.claims [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 806.434812] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100939, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.567328] env[63538]: DEBUG nova.network.neutron [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Updating instance_info_cache with network_info: [{"id": "377fafa3-5b12-4619-8d84-bf0b09188cd6", "address": "fa:16:3e:6f:23:e8", "network": {"id": "d9193295-f8fe-492a-8679-a6d265185bf7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-458833191", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377fafa3-5b", "ovs_interfaceid": "377fafa3-5b12-4619-8d84-bf0b09188cd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "831b08d0-41bf-439d-8e03-090fc08a5815", "address": "fa:16:3e:1b:a8:c6", "network": {"id": "64fa2fd6-5205-48cc-97b3-186267bf4153", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-401350160", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap831b08d0-41", "ovs_interfaceid": "831b08d0-41bf-439d-8e03-090fc08a5815", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3700c423-faa4-4788-a282-65acd1bbfe9d", "address": "fa:16:3e:99:12:3c", "network": {"id": "d9193295-f8fe-492a-8679-a6d265185bf7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-458833191", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.39", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3700c423-fa", "ovs_interfaceid": "3700c423-faa4-4788-a282-65acd1bbfe9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.754361] env[63538]: DEBUG oslo_vmware.api [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100941, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.763484] env[63538]: DEBUG nova.network.neutron [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Successfully updated port: 6edf1839-d708-46ad-836d-e2ecac08730f {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 806.809873] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Releasing lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.810190] env[63538]: DEBUG nova.compute.manager [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Received event network-changed-384175ce-d150-4f04-ad8f-d65790b79e5e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 806.810379] env[63538]: DEBUG nova.compute.manager [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Refreshing instance network info cache due to event network-changed-384175ce-d150-4f04-ad8f-d65790b79e5e. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 806.810669] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Acquiring lock "refresh_cache-79f4cdd9-219a-4440-9dd2-9b2a360965b1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.810828] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Acquired lock "refresh_cache-79f4cdd9-219a-4440-9dd2-9b2a360965b1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.811012] env[63538]: DEBUG nova.network.neutron [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Refreshing network info cache for port 384175ce-d150-4f04-ad8f-d65790b79e5e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 806.840111] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100942, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.361635} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.840516] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 806.841453] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ae5aa5-9f03-4a3f-835f-b1b236ce5968 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.867974] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 5bf7ed57-62d5-4abc-96d8-78b979baed92/5bf7ed57-62d5-4abc-96d8-78b979baed92.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 806.868779] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c2f4633-0645-4d02-a2ba-2e116a3ddf3a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.890731] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Waiting for the task: (returnval){ [ 806.890731] env[63538]: value = "task-5100943" [ 806.890731] env[63538]: _type = "Task" [ 806.890731] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.899818] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100943, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.929127] env[63538]: DEBUG nova.compute.utils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 806.930909] env[63538]: DEBUG nova.compute.manager [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 806.931157] env[63538]: DEBUG nova.network.neutron [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 806.945774] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100939, 'name': CreateVM_Task, 'duration_secs': 0.51773} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.945774] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 806.946429] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.946561] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.946943] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 806.947600] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fb47eb5-120d-4cea-a981-41df53868124 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.953611] env[63538]: DEBUG oslo_vmware.api [None 
req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 806.953611] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525a023f-675d-02d5-7067-e0aadd463208" [ 806.953611] env[63538]: _type = "Task" [ 806.953611] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.966889] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525a023f-675d-02d5-7067-e0aadd463208, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.014268] env[63538]: DEBUG nova.policy [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e7850362eef47f1b623b6e004d60ade', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '55edcd65da7b4a569a4c27aab4819cde', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 807.070996] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Releasing lock "refresh_cache-6850191a-4190-4795-ae18-830b41a76085" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.071473] env[63538]: DEBUG nova.compute.manager [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Instance network_info: |[{"id": "377fafa3-5b12-4619-8d84-bf0b09188cd6", "address": "fa:16:3e:6f:23:e8", "network": {"id": "d9193295-f8fe-492a-8679-a6d265185bf7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-458833191", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377fafa3-5b", "ovs_interfaceid": "377fafa3-5b12-4619-8d84-bf0b09188cd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "831b08d0-41bf-439d-8e03-090fc08a5815", "address": "fa:16:3e:1b:a8:c6", "network": {"id": 
"64fa2fd6-5205-48cc-97b3-186267bf4153", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-401350160", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap831b08d0-41", "ovs_interfaceid": "831b08d0-41bf-439d-8e03-090fc08a5815", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3700c423-faa4-4788-a282-65acd1bbfe9d", "address": "fa:16:3e:99:12:3c", "network": {"id": "d9193295-f8fe-492a-8679-a6d265185bf7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-458833191", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.39", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3700c423-fa", "ovs_interfaceid": "3700c423-faa4-4788-a282-65acd1bbfe9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 807.072332] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:23:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '90c863af-25e3-4fc6-a125-8baa7540298c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '377fafa3-5b12-4619-8d84-bf0b09188cd6', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:a8:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78e1ebb0-0130-446b-bf73-a0e59bbb95cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '831b08d0-41bf-439d-8e03-090fc08a5815', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:12:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '90c863af-25e3-4fc6-a125-8baa7540298c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3700c423-faa4-4788-a282-65acd1bbfe9d', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 807.085146] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Creating folder: Project (6906dcd3e0074931bdbe4233fbc2bf95). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 807.085146] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1832da8d-8bb0-4d53-811d-d5f5405c8695 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.097473] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Created folder: Project (6906dcd3e0074931bdbe4233fbc2bf95) in parent group-v992234. [ 807.097829] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Creating folder: Instances. Parent ref: group-v992386. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 807.099729] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-264410eb-8da4-4e28-aaaf-6e690aff526a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.110440] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Created folder: Instances in parent group-v992386. [ 807.110818] env[63538]: DEBUG oslo.service.loopingcall [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 807.111142] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6850191a-4190-4795-ae18-830b41a76085] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 807.111506] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc014099-4eed-4fd2-9c7c-5cca282dd52e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.138747] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 807.138747] env[63538]: value = "task-5100946" [ 807.138747] env[63538]: _type = "Task" [ 807.138747] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.150677] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100946, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.258577] env[63538]: DEBUG oslo_vmware.api [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Task: {'id': task-5100941, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.693666} completed successfully. 
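
Task task-5100941 finishing above is the last vCenter call in the teardown of instance f703cd1c that began with the "Terminating instance" record: power off (task-5100938), unregister, delete the instance's datastore directory, and only then deallocate its Neutron ports. The sketch below shows that ordering; the vcenter object and its methods are hypothetical stand-ins for the driver's vmops/vm_util/ds_util helpers.

    def destroy_instance(vcenter, instance_uuid, datastore_dir):
        """Hypothetical teardown mirroring the f703cd1c records."""
        vm_ref = vcenter.find_vm(instance_uuid)       # PropertyCollector lookup
        if vm_ref is None:
            return                                    # VM already gone

        try:
            vcenter.power_off(vm_ref)                 # PowerOffVM_Task
        except Exception:
            pass                                      # tolerate an already-off VM

        vcenter.unregister(vm_ref)                    # UnregisterVM
        vcenter.delete_file(datastore_dir)            # DeleteDatastoreFile_Task
        # The compute manager then runs network deallocation, which is the
        # "Deallocating network for instance" record that follows.
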
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.258994] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 807.259228] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 807.259563] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 807.259925] env[63538]: INFO nova.compute.manager [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Took 1.66 seconds to destroy the instance on the hypervisor. [ 807.260302] env[63538]: DEBUG oslo.service.loopingcall [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 807.261624] env[63538]: DEBUG nova.compute.manager [-] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 807.261624] env[63538]: DEBUG nova.network.neutron [-] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 807.266657] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "refresh_cache-87f8bb3e-6f32-4850-ac54-efad0befb268" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.267285] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "refresh_cache-87f8bb3e-6f32-4850-ac54-efad0befb268" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.267285] env[63538]: DEBUG nova.network.neutron [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 807.404710] env[63538]: DEBUG oslo_vmware.api [None 
req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100943, 'name': ReconfigVM_Task, 'duration_secs': 0.279604} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.407445] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 5bf7ed57-62d5-4abc-96d8-78b979baed92/5bf7ed57-62d5-4abc-96d8-78b979baed92.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 807.408127] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d59cbcf9-48c4-48c5-9be7-c8ea66a6747a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.416561] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Waiting for the task: (returnval){ [ 807.416561] env[63538]: value = "task-5100947" [ 807.416561] env[63538]: _type = "Task" [ 807.416561] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.426686] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100947, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.438438] env[63538]: DEBUG nova.compute.manager [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 807.468430] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525a023f-675d-02d5-7067-e0aadd463208, 'name': SearchDatastore_Task, 'duration_secs': 0.018555} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.469322] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.469322] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 807.469505] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.469695] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.469918] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 807.470240] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e789a2ae-e72e-4ad0-aa5d-20530eb7b504 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.482844] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 807.483278] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 807.484270] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1428f2f3-d1e9-412b-ac8e-010d60e971e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.494192] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 807.494192] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52710b26-7580-29c5-bd55-565a261988f8" [ 807.494192] env[63538]: _type = "Task" [ 807.494192] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.501463] env[63538]: DEBUG oslo_concurrency.lockutils [None req-12f201f9-7dd1-47df-be2c-958fb976016b tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.501789] env[63538]: DEBUG oslo_concurrency.lockutils [None req-12f201f9-7dd1-47df-be2c-958fb976016b tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.501928] env[63538]: DEBUG nova.compute.manager [None req-12f201f9-7dd1-47df-be2c-958fb976016b tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 807.503748] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7736e9ee-4990-4726-bcbc-1df18295a6b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.513876] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52710b26-7580-29c5-bd55-565a261988f8, 'name': SearchDatastore_Task, 'duration_secs': 0.013036} completed successfully. 
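
The lock on the cached faabbca4 VMDK, the devstack-image-cache_base directory creation and the SearchDatastore_Task above are the cache half of root-disk preparation; the CopyVirtualDisk, ExtendVirtualDisk and ReconfigVM tasks earlier in this section are the per-instance half. The sketch below strings the steps together under stated assumptions: every method on the hypothetical ds object stands in for the corresponding vmops/ds_util call, and the extend size in KB is inferred from the "Extending root virtual disk to 1048576" record for a 1 GB root disk.

    from oslo_concurrency import lockutils

    def prepare_root_disk(ds, image_id, instance_uuid, root_gb):
        """Hypothetical cache-then-copy preparation of an instance's root VMDK."""
        cached = f'devstack-image-cache_base/{image_id}/{image_id}.vmdk'
        target = f'{instance_uuid}/{instance_uuid}.vmdk'

        # Serialize on the cached file so only one spawn downloads the image.
        with lockutils.lock(f'[datastore1] {cached}'):
            if not ds.file_exists(cached):            # SearchDatastore_Task
                ds.fetch_image(image_id, cached)      # download on a cache miss

        ds.copy_virtual_disk(cached, target)          # CopyVirtualDisk_Task
        ds.extend_virtual_disk(target, root_gb * 1024 * 1024)  # KB; 1048576 for 1 GB
        ds.attach_disk_to_vm(instance_uuid, target)   # ReconfigVM_Task
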
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.516228] env[63538]: DEBUG nova.compute.manager [None req-12f201f9-7dd1-47df-be2c-958fb976016b tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63538) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 807.516924] env[63538]: DEBUG nova.objects.instance [None req-12f201f9-7dd1-47df-be2c-958fb976016b tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lazy-loading 'flavor' on Instance uuid 466be7db-79e4-49fd-aa3b-56fbe5c60457 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 807.523048] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcb20c54-a9a2-4ea4-86a9-d0942390391a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.540116] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 807.540116] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5277a542-8c2d-4517-54f8-c4513a69fd2c" [ 807.540116] env[63538]: _type = "Task" [ 807.540116] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.547433] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5277a542-8c2d-4517-54f8-c4513a69fd2c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.561575] env[63538]: DEBUG nova.compute.manager [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Received event network-vif-plugged-6edf1839-d708-46ad-836d-e2ecac08730f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 807.561817] env[63538]: DEBUG oslo_concurrency.lockutils [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] Acquiring lock "87f8bb3e-6f32-4850-ac54-efad0befb268-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.562065] env[63538]: DEBUG oslo_concurrency.lockutils [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] Lock "87f8bb3e-6f32-4850-ac54-efad0befb268-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.562247] env[63538]: DEBUG oslo_concurrency.lockutils [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] Lock "87f8bb3e-6f32-4850-ac54-efad0befb268-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.562428] env[63538]: DEBUG nova.compute.manager [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] No waiting events found dispatching network-vif-plugged-6edf1839-d708-46ad-836d-e2ecac08730f {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 807.562600] env[63538]: WARNING nova.compute.manager [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Received unexpected event network-vif-plugged-6edf1839-d708-46ad-836d-e2ecac08730f for instance with vm_state building and task_state spawning. [ 807.562773] env[63538]: DEBUG nova.compute.manager [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Received event network-changed-6edf1839-d708-46ad-836d-e2ecac08730f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 807.562928] env[63538]: DEBUG nova.compute.manager [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Refreshing instance network info cache due to event network-changed-6edf1839-d708-46ad-836d-e2ecac08730f. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 807.563177] env[63538]: DEBUG oslo_concurrency.lockutils [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] Acquiring lock "refresh_cache-87f8bb3e-6f32-4850-ac54-efad0befb268" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.651522] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100946, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.652537] env[63538]: DEBUG nova.network.neutron [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Updated VIF entry in instance network info cache for port 384175ce-d150-4f04-ad8f-d65790b79e5e. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 807.653364] env[63538]: DEBUG nova.network.neutron [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Updating instance_info_cache with network_info: [{"id": "384175ce-d150-4f04-ad8f-d65790b79e5e", "address": "fa:16:3e:f6:00:a2", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap384175ce-d1", "ovs_interfaceid": "384175ce-d150-4f04-ad8f-d65790b79e5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.814506] env[63538]: DEBUG nova.network.neutron [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Instance cache missing network info. 
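
Editor's annotation: the "Updating instance_info_cache with network_info" records dump the cached VIF list as JSON. Its shape (port id, MAC address, subnets with fixed IPs, binding details, devname) can be picked apart with plain dict access; the snippet below does that against an abbreviated copy of the entry shown above for port 384175ce-d150-4f04-ad8f-d65790b79e5e.

# Sketch: extracting the useful fields from one cached network_info entry.
# The dict is an abbreviated copy of the VIF dumped in the log above.
vif = {
    "id": "384175ce-d150-4f04-ad8f-d65790b79e5e",
    "address": "fa:16:3e:f6:00:a2",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4}],
        }],
        "meta": {"mtu": 8950},
    },
    "type": "ovs",
    "devname": "tap384175ce-d1",
    "active": True,
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"] if ip["type"] == "fixed"]
print(vif["id"], vif["address"], vif["devname"], fixed_ips,
      vif["network"]["meta"]["mtu"])
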
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 807.819706] env[63538]: DEBUG nova.network.neutron [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Successfully created port: ca7d9759-5177-4ea2-a411-3bae4181182d {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 807.934686] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100947, 'name': Rename_Task, 'duration_secs': 0.170931} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.934686] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 807.934686] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f152f06-a42e-4ba2-b13c-3bd704065f59 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.948896] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Waiting for the task: (returnval){ [ 807.948896] env[63538]: value = "task-5100948" [ 807.948896] env[63538]: _type = "Task" [ 807.948896] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.960296] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100948, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.037105] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-12f201f9-7dd1-47df-be2c-958fb976016b tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 808.037516] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e36e88a7-f41d-442f-ad02-00d9248cc414 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.060044] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5277a542-8c2d-4517-54f8-c4513a69fd2c, 'name': SearchDatastore_Task, 'duration_secs': 0.014108} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.065536] env[63538]: DEBUG nova.network.neutron [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Updating instance_info_cache with network_info: [{"id": "6edf1839-d708-46ad-836d-e2ecac08730f", "address": "fa:16:3e:0c:3b:b0", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6edf1839-d7", "ovs_interfaceid": "6edf1839-d708-46ad-836d-e2ecac08730f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.067972] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.068366] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 79f4cdd9-219a-4440-9dd2-9b2a360965b1/79f4cdd9-219a-4440-9dd2-9b2a360965b1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 808.068756] env[63538]: DEBUG oslo_vmware.api [None req-12f201f9-7dd1-47df-be2c-958fb976016b tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 808.068756] env[63538]: value = "task-5100949" [ 808.068756] env[63538]: _type = "Task" [ 808.068756] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.070198] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87425a48-d17b-418d-9cd4-768f51d264bd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.082581] env[63538]: DEBUG oslo_vmware.api [None req-12f201f9-7dd1-47df-be2c-958fb976016b tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100949, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.084683] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 808.084683] env[63538]: value = "task-5100950" [ 808.084683] env[63538]: _type = "Task" [ 808.084683] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.099131] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100950, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.122952] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df3123e-837d-47a1-93ef-8ae08d0700ec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.132197] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19155db1-65b0-4c40-a8de-7974a915ff4a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.169302] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Releasing lock "refresh_cache-79f4cdd9-219a-4440-9dd2-9b2a360965b1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.169769] env[63538]: DEBUG nova.compute.manager [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Received event network-vif-plugged-3700c423-faa4-4788-a282-65acd1bbfe9d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 808.169868] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Acquiring lock "6850191a-4190-4795-ae18-830b41a76085-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.170026] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Lock "6850191a-4190-4795-ae18-830b41a76085-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.170205] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Lock "6850191a-4190-4795-ae18-830b41a76085-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.170383] env[63538]: DEBUG nova.compute.manager [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] No waiting events found dispatching network-vif-plugged-3700c423-faa4-4788-a282-65acd1bbfe9d {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 808.170555] env[63538]: WARNING nova.compute.manager [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Received unexpected event network-vif-plugged-3700c423-faa4-4788-a282-65acd1bbfe9d for instance with vm_state building and task_state spawning. [ 808.170724] env[63538]: DEBUG nova.compute.manager [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Received event network-changed-3700c423-faa4-4788-a282-65acd1bbfe9d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 808.170886] env[63538]: DEBUG nova.compute.manager [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Refreshing instance network info cache due to event network-changed-3700c423-faa4-4788-a282-65acd1bbfe9d. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 808.171101] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Acquiring lock "refresh_cache-6850191a-4190-4795-ae18-830b41a76085" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.171270] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Acquired lock "refresh_cache-6850191a-4190-4795-ae18-830b41a76085" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.171618] env[63538]: DEBUG nova.network.neutron [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Refreshing network info cache for port 3700c423-faa4-4788-a282-65acd1bbfe9d {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 808.178062] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d48a31d-3724-4437-b643-341b576fc95f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.187705] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100946, 'name': CreateVM_Task, 'duration_secs': 0.519028} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.190701] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6850191a-4190-4795-ae18-830b41a76085] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 808.192365] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.192567] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.192950] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 808.194349] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf3d956-64fd-4334-9c12-a3897d20e11f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.199103] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a0d44d1-8ab4-420b-9699-cf14a25a4e22 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.206724] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 808.206724] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5261a4fd-d197-a9bb-b625-298f5a8a3c98" [ 808.206724] env[63538]: _type = "Task" [ 808.206724] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.216830] env[63538]: DEBUG nova.compute.provider_tree [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.230359] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5261a4fd-d197-a9bb-b625-298f5a8a3c98, 'name': SearchDatastore_Task, 'duration_secs': 0.014187} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.230752] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.231038] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 808.233160] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.233160] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.233160] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 808.233160] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8da9b9fb-3e59-451f-9620-da77e28dc048 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.242958] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 808.243265] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Folder [datastore1] devstack-image-cache_base created. 
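
Editor's annotation: the ds_util records above create the datastore-side image cache directory through FileManager.MakeDirectory. A sketch of the same call through oslo.vmware is below, assuming an existing VMwareAPISession `session` and a datacenter moref `dc_ref` (both placeholders).

# Sketch of the "Creating directory ... / Invoking FileManager.MakeDirectory" step.
# `session` is an oslo.vmware VMwareAPISession and `dc_ref` a datacenter moref;
# both are assumed to exist already.
def make_image_cache_dir(session, dc_ref,
                         ds_path='[datastore1] devstack-image-cache_base'):
    file_manager = session.vim.service_content.fileManager
    # MakeDirectory returns no Task, so there is nothing to poll afterwards;
    # createParentDirectories also creates missing intermediate directories.
    session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                       name=ds_path, datacenter=dc_ref,
                       createParentDirectories=True)
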
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 808.244154] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07077eba-c6eb-4a1c-ad5c-f37c88f64216 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.251117] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 808.251117] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a893ef-bf33-534d-ddfb-8965584d03ab" [ 808.251117] env[63538]: _type = "Task" [ 808.251117] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.268111] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a893ef-bf33-534d-ddfb-8965584d03ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.448018] env[63538]: DEBUG nova.compute.manager [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 808.466589] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100948, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.487609] env[63538]: DEBUG nova.virt.hardware [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 808.488037] env[63538]: DEBUG nova.virt.hardware [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 808.488371] env[63538]: DEBUG nova.virt.hardware [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 808.488972] env[63538]: DEBUG nova.virt.hardware [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 808.488972] env[63538]: DEBUG nova.virt.hardware [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 808.489097] env[63538]: DEBUG nova.virt.hardware [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 808.489687] env[63538]: DEBUG nova.virt.hardware [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 808.489687] env[63538]: DEBUG nova.virt.hardware [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 808.489922] env[63538]: DEBUG nova.virt.hardware [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 808.490201] env[63538]: DEBUG nova.virt.hardware [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 808.490411] env[63538]: DEBUG nova.virt.hardware [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 808.490902] env[63538]: DEBUG nova.network.neutron [-] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.493299] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3638c4b5-8c83-481e-b77e-549a481241d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.504813] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50735b65-76b4-4d3b-acdc-2c5d0bf9bdb8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.570993] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "refresh_cache-87f8bb3e-6f32-4850-ac54-efad0befb268" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.571503] env[63538]: DEBUG nova.compute.manager [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Instance network_info: |[{"id": "6edf1839-d708-46ad-836d-e2ecac08730f", "address": "fa:16:3e:0c:3b:b0", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6edf1839-d7", "ovs_interfaceid": 
"6edf1839-d708-46ad-836d-e2ecac08730f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 808.571906] env[63538]: DEBUG oslo_concurrency.lockutils [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] Acquired lock "refresh_cache-87f8bb3e-6f32-4850-ac54-efad0befb268" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.572254] env[63538]: DEBUG nova.network.neutron [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Refreshing network info cache for port 6edf1839-d708-46ad-836d-e2ecac08730f {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 808.580137] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:3b:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ded8bac-871f-491b-94ec-cb67c08bc828', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6edf1839-d708-46ad-836d-e2ecac08730f', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.588848] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Creating folder: Project (55edcd65da7b4a569a4c27aab4819cde). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 808.593523] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e918a724-82f6-4f46-8eec-d37b77154e39 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.606356] env[63538]: DEBUG oslo_vmware.api [None req-12f201f9-7dd1-47df-be2c-958fb976016b tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100949, 'name': PowerOffVM_Task, 'duration_secs': 0.227664} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.612022] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-12f201f9-7dd1-47df-be2c-958fb976016b tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 808.612022] env[63538]: DEBUG nova.compute.manager [None req-12f201f9-7dd1-47df-be2c-958fb976016b tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 808.612022] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100950, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.612547] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc138421-1a84-4948-b932-8706a135f93e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.619021] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Created folder: Project (55edcd65da7b4a569a4c27aab4819cde) in parent group-v992234. [ 808.619021] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Creating folder: Instances. Parent ref: group-v992389. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 808.619021] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70d38c25-4228-4baf-857a-396670089e29 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.630990] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Created folder: Instances in parent group-v992389. [ 808.631325] env[63538]: DEBUG oslo.service.loopingcall [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
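
Editor's annotation: the "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" record is emitted from oslo.service's loopingcall module. As a general illustration of that module (not a claim about which looping-call variant Nova uses at this point), a FixedIntervalLoopingCall that polls until a condition is met looks like this; the poll function and interval are invented for the example.

# General oslo.service looping-call illustration; the poll function and interval
# are invented for the example, not taken from Nova.
from oslo_service import loopingcall

attempts = {'n': 0}

def _poll():
    attempts['n'] += 1
    if attempts['n'] >= 3:  # pretend whatever we are waiting for became ready
        raise loopingcall.LoopingCallDone(retvalue='ready')

timer = loopingcall.FixedIntervalLoopingCall(_poll)
print(timer.start(interval=0.1).wait())  # -> 'ready' after three polls
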
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.631602] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 808.631952] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9ea12ac-5a37-4220-b431-35df1715286e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.654230] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 808.654230] env[63538]: value = "task-5100953" [ 808.654230] env[63538]: _type = "Task" [ 808.654230] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.663975] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100953, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.724243] env[63538]: DEBUG nova.scheduler.client.report [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 808.765987] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a893ef-bf33-534d-ddfb-8965584d03ab, 'name': SearchDatastore_Task, 'duration_secs': 0.071236} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.767011] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2c3dea3-cdfa-4711-9dd8-0061c6d79ee2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.773670] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 808.773670] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5273f0bf-9b87-a371-1c28-b05f322892da" [ 808.773670] env[63538]: _type = "Task" [ 808.773670] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.783416] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5273f0bf-9b87-a371-1c28-b05f322892da, 'name': SearchDatastore_Task} progress is 0%. 
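
Editor's annotation: the scheduler report-client record above carries the provider's inventory: totals, reserved amounts, allocation ratios and max_unit per resource class. Placement derives usable capacity as (total - reserved) * allocation_ratio; the snippet below simply applies that formula to the numbers shown for provider f65218a4-1d3d-476a-9093-01cae92c8635.

# Applying the Placement capacity formula,
#   capacity = (total - reserved) * allocation_ratio,
# to the inventory data logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 95},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # max_unit still caps what any single allocation may request.
    print(f"{rc}: capacity={capacity}, max_unit={inv['max_unit']}")
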
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.968032] env[63538]: DEBUG oslo_vmware.api [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100948, 'name': PowerOnVM_Task, 'duration_secs': 0.546651} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.968032] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 808.968032] env[63538]: INFO nova.compute.manager [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Took 8.75 seconds to spawn the instance on the hypervisor. [ 808.968032] env[63538]: DEBUG nova.compute.manager [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 808.968032] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864d30f2-7f96-4d12-98cc-6b9d836ae96d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.999973] env[63538]: INFO nova.compute.manager [-] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Took 1.74 seconds to deallocate network for instance. [ 809.050296] env[63538]: DEBUG nova.network.neutron [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Updated VIF entry in instance network info cache for port 3700c423-faa4-4788-a282-65acd1bbfe9d. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 809.051049] env[63538]: DEBUG nova.network.neutron [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Updating instance_info_cache with network_info: [{"id": "377fafa3-5b12-4619-8d84-bf0b09188cd6", "address": "fa:16:3e:6f:23:e8", "network": {"id": "d9193295-f8fe-492a-8679-a6d265185bf7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-458833191", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377fafa3-5b", "ovs_interfaceid": "377fafa3-5b12-4619-8d84-bf0b09188cd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "831b08d0-41bf-439d-8e03-090fc08a5815", "address": "fa:16:3e:1b:a8:c6", "network": {"id": "64fa2fd6-5205-48cc-97b3-186267bf4153", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-401350160", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap831b08d0-41", "ovs_interfaceid": "831b08d0-41bf-439d-8e03-090fc08a5815", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3700c423-faa4-4788-a282-65acd1bbfe9d", "address": "fa:16:3e:99:12:3c", "network": {"id": "d9193295-f8fe-492a-8679-a6d265185bf7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-458833191", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.39", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 
50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3700c423-fa", "ovs_interfaceid": "3700c423-faa4-4788-a282-65acd1bbfe9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.105406] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100950, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.564866} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.105697] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 79f4cdd9-219a-4440-9dd2-9b2a360965b1/79f4cdd9-219a-4440-9dd2-9b2a360965b1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 809.105921] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 809.106227] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7361b4f-a13c-46b7-bae0-a0e4ed833d21 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.114885] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 809.114885] env[63538]: value = "task-5100954" [ 809.114885] env[63538]: _type = "Task" [ 809.114885] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.129600] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100954, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.133062] env[63538]: DEBUG oslo_concurrency.lockutils [None req-12f201f9-7dd1-47df-be2c-958fb976016b tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.631s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.168055] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100953, 'name': CreateVM_Task, 'duration_secs': 0.372908} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.168055] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 809.168055] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.168402] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.168533] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 809.168810] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e7cf813-e5b4-4c0b-bb11-770214a7b443 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.175150] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 809.175150] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52281941-176e-2e7f-a78e-bf820d040fb7" [ 809.175150] env[63538]: _type = "Task" [ 809.175150] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.184859] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52281941-176e-2e7f-a78e-bf820d040fb7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.229692] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.810s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.230283] env[63538]: DEBUG nova.compute.manager [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 809.233013] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 37.680s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.233222] env[63538]: DEBUG nova.objects.instance [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63538) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 809.286034] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5273f0bf-9b87-a371-1c28-b05f322892da, 'name': SearchDatastore_Task, 'duration_secs': 0.028131} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.286389] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.286674] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 6850191a-4190-4795-ae18-830b41a76085/6850191a-4190-4795-ae18-830b41a76085.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 809.286963] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad102f51-a9e5-48dd-8905-f76b47eb0512 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.299031] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 809.299031] env[63538]: value = "task-5100955" [ 809.299031] env[63538]: _type = "Task" [ 809.299031] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.306893] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100955, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.488428] env[63538]: INFO nova.compute.manager [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Took 53.24 seconds to build instance. [ 809.507675] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.510520] env[63538]: DEBUG nova.network.neutron [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Updated VIF entry in instance network info cache for port 6edf1839-d708-46ad-836d-e2ecac08730f. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 809.511298] env[63538]: DEBUG nova.network.neutron [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Updating instance_info_cache with network_info: [{"id": "6edf1839-d708-46ad-836d-e2ecac08730f", "address": "fa:16:3e:0c:3b:b0", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6edf1839-d7", "ovs_interfaceid": "6edf1839-d708-46ad-836d-e2ecac08730f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.554372] env[63538]: DEBUG oslo_concurrency.lockutils [req-63c8ceef-b16a-4c1d-8276-a79d98f70fdc req-a5383662-323b-4819-98ef-c9c6c0229919 service nova] Releasing lock "refresh_cache-6850191a-4190-4795-ae18-830b41a76085" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.631903] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100954, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086258} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.631903] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 809.631903] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8e4a86-8077-48e4-b69e-e3b63d12df41 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.671407] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 79f4cdd9-219a-4440-9dd2-9b2a360965b1/79f4cdd9-219a-4440-9dd2-9b2a360965b1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 809.672181] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-065ac733-3bd6-45a9-82fa-7f7ebe5f92af {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.700651] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52281941-176e-2e7f-a78e-bf820d040fb7, 'name': SearchDatastore_Task, 'duration_secs': 0.013901} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.702546] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.702894] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 809.703113] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.703281] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.703414] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.703767] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 809.703767] env[63538]: value = "task-5100956" [ 809.703767] env[63538]: _type = "Task" [ 809.703767] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.704040] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-948b8b36-87c3-48b6-b1d1-363593434080 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.715283] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100956, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.720968] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.722068] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 809.722370] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6035fa41-4f76-4d5c-87ce-a6007aff8981 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.730246] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 809.730246] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52afb387-408e-9908-b47e-4a41c72fffc9" [ 809.730246] env[63538]: _type = "Task" [ 809.730246] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.740029] env[63538]: DEBUG nova.compute.utils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 809.746734] env[63538]: DEBUG nova.compute.manager [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 809.746840] env[63538]: DEBUG nova.network.neutron [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 809.749591] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52afb387-408e-9908-b47e-4a41c72fffc9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.775517] env[63538]: DEBUG nova.network.neutron [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Successfully updated port: ca7d9759-5177-4ea2-a411-3bae4181182d {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 809.806519] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100955, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.808615] env[63538]: DEBUG nova.policy [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21f98d3c77454d809c0aa1ca5c7dc6d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '492427e54e1048f292dab2abdac71af5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 809.853643] env[63538]: DEBUG nova.compute.manager [req-a3a84f62-7493-49b0-b7b2-ceb7fbaa041f req-43231891-d8f5-41bd-aeb9-350f1a95dc40 service nova] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Received event network-vif-deleted-14cf1960-9e0d-41c9-b9c1-44ff70d859e3 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 809.854200] env[63538]: DEBUG nova.compute.manager [req-a3a84f62-7493-49b0-b7b2-ceb7fbaa041f req-43231891-d8f5-41bd-aeb9-350f1a95dc40 service nova] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Received event network-vif-plugged-ca7d9759-5177-4ea2-a411-3bae4181182d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 809.854600] env[63538]: DEBUG oslo_concurrency.lockutils [req-a3a84f62-7493-49b0-b7b2-ceb7fbaa041f req-43231891-d8f5-41bd-aeb9-350f1a95dc40 service nova] Acquiring lock "de68a921-bf67-4794-923d-4e062d8ff802-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.854758] env[63538]: DEBUG oslo_concurrency.lockutils [req-a3a84f62-7493-49b0-b7b2-ceb7fbaa041f req-43231891-d8f5-41bd-aeb9-350f1a95dc40 service nova] Lock "de68a921-bf67-4794-923d-4e062d8ff802-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.854952] env[63538]: DEBUG oslo_concurrency.lockutils [req-a3a84f62-7493-49b0-b7b2-ceb7fbaa041f req-43231891-d8f5-41bd-aeb9-350f1a95dc40 service nova] Lock "de68a921-bf67-4794-923d-4e062d8ff802-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.855544] env[63538]: 
DEBUG nova.compute.manager [req-a3a84f62-7493-49b0-b7b2-ceb7fbaa041f req-43231891-d8f5-41bd-aeb9-350f1a95dc40 service nova] [instance: de68a921-bf67-4794-923d-4e062d8ff802] No waiting events found dispatching network-vif-plugged-ca7d9759-5177-4ea2-a411-3bae4181182d {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 809.855544] env[63538]: WARNING nova.compute.manager [req-a3a84f62-7493-49b0-b7b2-ceb7fbaa041f req-43231891-d8f5-41bd-aeb9-350f1a95dc40 service nova] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Received unexpected event network-vif-plugged-ca7d9759-5177-4ea2-a411-3bae4181182d for instance with vm_state building and task_state spawning. [ 809.992980] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d6354fa9-07c2-4aaa-a817-374a90a58ac4 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Lock "5bf7ed57-62d5-4abc-96d8-78b979baed92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.351s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.018038] env[63538]: DEBUG oslo_concurrency.lockutils [req-d84db8b3-a287-4ff6-8c0d-e3875cabde18 req-12d22763-d0c7-4aac-8c59-0356f9f4f509 service nova] Releasing lock "refresh_cache-87f8bb3e-6f32-4850-ac54-efad0befb268" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.228435] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100956, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.252298] env[63538]: DEBUG nova.compute.manager [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 810.259035] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d1694620-babb-4286-9992-ec01c7064b93 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.259035] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52afb387-408e-9908-b47e-4a41c72fffc9, 'name': SearchDatastore_Task, 'duration_secs': 0.025295} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.259035] env[63538]: DEBUG nova.network.neutron [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Successfully created port: 177a8c57-989c-48d3-bf05-40bead2e9b7f {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 810.261267] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.996s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.261928] env[63538]: DEBUG nova.objects.instance [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lazy-loading 'resources' on Instance uuid dbf48807-08a7-46d1-8454-42437a9f87c0 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 810.264057] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f43e401a-9770-4544-8e56-9c6e91f09df0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.272126] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 810.272126] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bc3fa6-79b2-c1a4-2040-a78a84c34d0e" [ 810.272126] env[63538]: _type = "Task" [ 810.272126] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.279349] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "refresh_cache-de68a921-bf67-4794-923d-4e062d8ff802" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.279349] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "refresh_cache-de68a921-bf67-4794-923d-4e062d8ff802" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.279349] env[63538]: DEBUG nova.network.neutron [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 810.287810] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bc3fa6-79b2-c1a4-2040-a78a84c34d0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.309032] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100955, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.967016} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.309353] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 6850191a-4190-4795-ae18-830b41a76085/6850191a-4190-4795-ae18-830b41a76085.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 810.312049] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 810.312049] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e52dac59-9659-4556-b2eb-79976f2533c0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.319453] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 810.319453] env[63538]: value = "task-5100957" [ 810.319453] env[63538]: _type = "Task" [ 810.319453] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.331273] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100957, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.331984] env[63538]: DEBUG nova.objects.instance [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lazy-loading 'flavor' on Instance uuid 466be7db-79e4-49fd-aa3b-56fbe5c60457 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 810.496076] env[63538]: DEBUG nova.compute.manager [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 810.720302] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100956, 'name': ReconfigVM_Task, 'duration_secs': 0.751306} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.720636] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 79f4cdd9-219a-4440-9dd2-9b2a360965b1/79f4cdd9-219a-4440-9dd2-9b2a360965b1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 810.721331] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ffb42159-5baf-499a-a843-077687da9b6e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.729867] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 810.729867] env[63538]: value = "task-5100958" [ 810.729867] env[63538]: _type = "Task" [ 810.729867] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.738871] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100958, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.784983] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bc3fa6-79b2-c1a4-2040-a78a84c34d0e, 'name': SearchDatastore_Task, 'duration_secs': 0.013829} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.785378] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.785580] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 87f8bb3e-6f32-4850-ac54-efad0befb268/87f8bb3e-6f32-4850-ac54-efad0befb268.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 810.793029] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45adbe1b-4cc9-42c0-b886-accde3cf8d08 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.799662] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 810.799662] env[63538]: value = "task-5100959" [ 810.799662] env[63538]: _type = "Task" [ 810.799662] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.815373] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100959, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.832264] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100957, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083116} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.837283] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 810.838871] env[63538]: DEBUG nova.network.neutron [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 810.841812] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2ae182-df28-4308-9db0-86eb754594b8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.845241] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.845241] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.845427] env[63538]: DEBUG nova.network.neutron [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 810.845606] env[63538]: DEBUG nova.objects.instance [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lazy-loading 'info_cache' on Instance uuid 466be7db-79e4-49fd-aa3b-56fbe5c60457 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 810.876511] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 6850191a-4190-4795-ae18-830b41a76085/6850191a-4190-4795-ae18-830b41a76085.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 810.881615] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d1c6689-3589-4ff4-914f-028bf990139c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.906562] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 810.906562] env[63538]: value = "task-5100960" [ 810.906562] env[63538]: _type = "Task" [ 810.906562] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.917021] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100960, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.015553] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.091528] env[63538]: DEBUG nova.network.neutron [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Updating instance_info_cache with network_info: [{"id": "ca7d9759-5177-4ea2-a411-3bae4181182d", "address": "fa:16:3e:67:5a:a6", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca7d9759-51", "ovs_interfaceid": "ca7d9759-5177-4ea2-a411-3bae4181182d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.246533] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100958, 'name': Rename_Task, 'duration_secs': 0.178425} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.251206] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 811.251577] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ddfc7af7-0c5b-44ad-ad4d-cf1e8924a62b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.264990] env[63538]: DEBUG nova.compute.manager [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 811.274264] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 811.274264] env[63538]: value = "task-5100961" [ 811.274264] env[63538]: _type = "Task" [ 811.274264] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.288576] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100961, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.302707] env[63538]: DEBUG nova.virt.hardware [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 811.303011] env[63538]: DEBUG nova.virt.hardware [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 811.303198] env[63538]: DEBUG nova.virt.hardware [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 811.303423] env[63538]: DEBUG nova.virt.hardware [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 811.303568] env[63538]: DEBUG nova.virt.hardware [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 811.303733] env[63538]: DEBUG nova.virt.hardware [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 811.303968] 
env[63538]: DEBUG nova.virt.hardware [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 811.304286] env[63538]: DEBUG nova.virt.hardware [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 811.304481] env[63538]: DEBUG nova.virt.hardware [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 811.304697] env[63538]: DEBUG nova.virt.hardware [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 811.304889] env[63538]: DEBUG nova.virt.hardware [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 811.306585] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f751b2d6-7e57-4f38-b916-6dbda242e6a4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.324056] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100959, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.330117] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2c13f6-0ada-47e9-a106-4ba5c38790f3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.349125] env[63538]: DEBUG nova.objects.base [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Object Instance<466be7db-79e4-49fd-aa3b-56fbe5c60457> lazy-loaded attributes: flavor,info_cache {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 811.412967] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f42d41b-854d-4e5e-95df-b911f300f9c5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.423529] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100960, 'name': ReconfigVM_Task, 'duration_secs': 0.321737} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.425827] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 6850191a-4190-4795-ae18-830b41a76085/6850191a-4190-4795-ae18-830b41a76085.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 811.426725] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-53861b6f-edf5-449b-8a6f-cfddcc11029b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.429544] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4146600b-417f-420f-9440-13a210c7774c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.464359] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 811.464359] env[63538]: value = "task-5100962" [ 811.464359] env[63538]: _type = "Task" [ 811.464359] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.465130] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df7595e-1e7c-40a9-8c6f-d443537859dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.480881] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581ae6f3-0005-4483-89b1-b51627b01d3e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.485785] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100962, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.501760] env[63538]: DEBUG nova.compute.provider_tree [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 811.595410] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "refresh_cache-de68a921-bf67-4794-923d-4e062d8ff802" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.595410] env[63538]: DEBUG nova.compute.manager [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Instance network_info: |[{"id": "ca7d9759-5177-4ea2-a411-3bae4181182d", "address": "fa:16:3e:67:5a:a6", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca7d9759-51", "ovs_interfaceid": "ca7d9759-5177-4ea2-a411-3bae4181182d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 811.595858] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 
tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:5a:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ded8bac-871f-491b-94ec-cb67c08bc828', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca7d9759-5177-4ea2-a411-3bae4181182d', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 811.604175] env[63538]: DEBUG oslo.service.loopingcall [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 811.604175] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 811.604788] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fe31d9a-d9ef-4333-bbfa-35c95e8fcadc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.626089] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 811.626089] env[63538]: value = "task-5100963" [ 811.626089] env[63538]: _type = "Task" [ 811.626089] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.634913] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100963, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.760788] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Acquiring lock "5bf7ed57-62d5-4abc-96d8-78b979baed92" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.762029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Lock "5bf7ed57-62d5-4abc-96d8-78b979baed92" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.762029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Acquiring lock "5bf7ed57-62d5-4abc-96d8-78b979baed92-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.762029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Lock "5bf7ed57-62d5-4abc-96d8-78b979baed92-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.762353] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Lock "5bf7ed57-62d5-4abc-96d8-78b979baed92-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.766093] env[63538]: INFO nova.compute.manager [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Terminating instance [ 811.770533] env[63538]: DEBUG nova.compute.manager [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 811.770533] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 811.770533] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec2ef80-0355-4aae-a69a-96147c0e40e3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.785339] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 811.788533] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ffb792ab-ce23-47c6-846c-f4cbc73da405 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.790426] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100961, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.797017] env[63538]: DEBUG oslo_vmware.api [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Waiting for the task: (returnval){ [ 811.797017] env[63538]: value = "task-5100964" [ 811.797017] env[63538]: _type = "Task" [ 811.797017] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.807322] env[63538]: DEBUG oslo_vmware.api [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100964, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.818356] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100959, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.691477} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.818635] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 87f8bb3e-6f32-4850-ac54-efad0befb268/87f8bb3e-6f32-4850-ac54-efad0befb268.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 811.818857] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 811.819146] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6f8e821-c0da-450d-b2cc-0a0577e37f74 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.827578] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 811.827578] env[63538]: value = "task-5100965" [ 811.827578] env[63538]: _type = "Task" [ 811.827578] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.843740] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100965, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.945560] env[63538]: DEBUG nova.compute.manager [req-7c2a1475-a7bf-4d1a-9758-85face4e4fbf req-eb189535-3221-4256-9da0-239e9ec25b6c service nova] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Received event network-vif-plugged-177a8c57-989c-48d3-bf05-40bead2e9b7f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 811.945829] env[63538]: DEBUG oslo_concurrency.lockutils [req-7c2a1475-a7bf-4d1a-9758-85face4e4fbf req-eb189535-3221-4256-9da0-239e9ec25b6c service nova] Acquiring lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.946073] env[63538]: DEBUG oslo_concurrency.lockutils [req-7c2a1475-a7bf-4d1a-9758-85face4e4fbf req-eb189535-3221-4256-9da0-239e9ec25b6c service nova] Lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.946272] env[63538]: DEBUG oslo_concurrency.lockutils [req-7c2a1475-a7bf-4d1a-9758-85face4e4fbf req-eb189535-3221-4256-9da0-239e9ec25b6c service nova] Lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.946466] env[63538]: DEBUG nova.compute.manager [req-7c2a1475-a7bf-4d1a-9758-85face4e4fbf req-eb189535-3221-4256-9da0-239e9ec25b6c service nova] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] No waiting events found dispatching network-vif-plugged-177a8c57-989c-48d3-bf05-40bead2e9b7f {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 811.946660] env[63538]: WARNING nova.compute.manager [req-7c2a1475-a7bf-4d1a-9758-85face4e4fbf req-eb189535-3221-4256-9da0-239e9ec25b6c service nova] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Received unexpected event network-vif-plugged-177a8c57-989c-48d3-bf05-40bead2e9b7f for instance with vm_state building and task_state spawning. [ 811.982209] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100962, 'name': Rename_Task, 'duration_secs': 0.184181} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.982327] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 811.985354] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-877d16e6-5453-486a-bd92-78e08a18af0c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.996444] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 811.996444] env[63538]: value = "task-5100966" [ 811.996444] env[63538]: _type = "Task" [ 811.996444] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.003780] env[63538]: DEBUG nova.network.neutron [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Successfully updated port: 177a8c57-989c-48d3-bf05-40bead2e9b7f {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 812.006355] env[63538]: DEBUG nova.scheduler.client.report [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 812.014366] env[63538]: DEBUG nova.compute.manager [req-1934602b-63fd-4d97-bc34-81a2f874f26e req-7fc21c4d-92c6-4981-90db-45f6e9b77582 service nova] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Received event network-changed-ca7d9759-5177-4ea2-a411-3bae4181182d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 812.014603] env[63538]: DEBUG nova.compute.manager [req-1934602b-63fd-4d97-bc34-81a2f874f26e req-7fc21c4d-92c6-4981-90db-45f6e9b77582 service nova] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Refreshing instance network info cache due to event network-changed-ca7d9759-5177-4ea2-a411-3bae4181182d. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 812.014833] env[63538]: DEBUG oslo_concurrency.lockutils [req-1934602b-63fd-4d97-bc34-81a2f874f26e req-7fc21c4d-92c6-4981-90db-45f6e9b77582 service nova] Acquiring lock "refresh_cache-de68a921-bf67-4794-923d-4e062d8ff802" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.014991] env[63538]: DEBUG oslo_concurrency.lockutils [req-1934602b-63fd-4d97-bc34-81a2f874f26e req-7fc21c4d-92c6-4981-90db-45f6e9b77582 service nova] Acquired lock "refresh_cache-de68a921-bf67-4794-923d-4e062d8ff802" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.015447] env[63538]: DEBUG nova.network.neutron [req-1934602b-63fd-4d97-bc34-81a2f874f26e req-7fc21c4d-92c6-4981-90db-45f6e9b77582 service nova] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Refreshing network info cache for port ca7d9759-5177-4ea2-a411-3bae4181182d {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 812.024949] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100966, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.140593] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100963, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.262396] env[63538]: DEBUG nova.network.neutron [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance_info_cache with network_info: [{"id": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "address": "fa:16:3e:08:aa:47", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa679ee9b-3e", "ovs_interfaceid": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.287664] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100961, 'name': PowerOnVM_Task} 
progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.309771] env[63538]: DEBUG oslo_vmware.api [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100964, 'name': PowerOffVM_Task, 'duration_secs': 0.325104} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.310103] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 812.310290] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 812.310556] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e212839-f84d-4010-975d-520f7723f7f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.340630] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100965, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072247} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.340630] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 812.341560] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717de357-3623-4118-8ec2-2be959783e7e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.367586] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 87f8bb3e-6f32-4850-ac54-efad0befb268/87f8bb3e-6f32-4850-ac54-efad0befb268.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 812.367951] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cddcb7a-b3ad-4aa1-a9d5-df43ed7bf199 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.384232] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 812.384490] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 812.384686] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Deleting the datastore file [datastore1] 5bf7ed57-62d5-4abc-96d8-78b979baed92 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 812.385432] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5bdaf51b-521e-43e7-b030-3a24aa0356b7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.392436] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 812.392436] env[63538]: value = "task-5100968" [ 812.392436] env[63538]: _type = "Task" [ 812.392436] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.393969] env[63538]: DEBUG oslo_vmware.api [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Waiting for the task: (returnval){ [ 812.393969] env[63538]: value = "task-5100969" [ 812.393969] env[63538]: _type = "Task" [ 812.393969] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.409316] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100968, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.409900] env[63538]: DEBUG oslo_vmware.api [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100969, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.506354] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100966, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.513067] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "refresh_cache-49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.513223] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "refresh_cache-49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.513375] env[63538]: DEBUG nova.network.neutron [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 812.518828] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.258s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.521276] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.516s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.521526] env[63538]: DEBUG nova.objects.instance [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lazy-loading 'pci_requests' on Instance uuid f9fa5578-acf3-416f-9cb0-8ceb00e5132d {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 812.552808] env[63538]: INFO nova.scheduler.client.report [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Deleted allocations for instance dbf48807-08a7-46d1-8454-42437a9f87c0 [ 812.638117] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100963, 'name': CreateVM_Task, 'duration_secs': 0.521604} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.638332] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 812.644018] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.644018] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.644018] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 812.644018] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-009dba3b-e77c-44ac-b61e-5b152d54862b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.650073] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 812.650073] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520d48d1-db14-f2f9-9a1f-e4a417becc35" [ 812.650073] env[63538]: _type = "Task" [ 812.650073] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.659880] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520d48d1-db14-f2f9-9a1f-e4a417becc35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.765705] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.788786] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100961, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.836397] env[63538]: DEBUG nova.network.neutron [req-1934602b-63fd-4d97-bc34-81a2f874f26e req-7fc21c4d-92c6-4981-90db-45f6e9b77582 service nova] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Updated VIF entry in instance network info cache for port ca7d9759-5177-4ea2-a411-3bae4181182d. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 812.837040] env[63538]: DEBUG nova.network.neutron [req-1934602b-63fd-4d97-bc34-81a2f874f26e req-7fc21c4d-92c6-4981-90db-45f6e9b77582 service nova] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Updating instance_info_cache with network_info: [{"id": "ca7d9759-5177-4ea2-a411-3bae4181182d", "address": "fa:16:3e:67:5a:a6", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca7d9759-51", "ovs_interfaceid": "ca7d9759-5177-4ea2-a411-3bae4181182d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.911072] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100968, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.913695] env[63538]: DEBUG oslo_vmware.api [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100969, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.014562] env[63538]: DEBUG oslo_vmware.api [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100966, 'name': PowerOnVM_Task, 'duration_secs': 1.00233} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.018061] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 813.018814] env[63538]: INFO nova.compute.manager [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Took 15.57 seconds to spawn the instance on the hypervisor. [ 813.019226] env[63538]: DEBUG nova.compute.manager [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 813.020570] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7ae6a8-271c-4d96-9364-6b5f7b737f7a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.026301] env[63538]: DEBUG nova.objects.instance [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lazy-loading 'numa_topology' on Instance uuid f9fa5578-acf3-416f-9cb0-8ceb00e5132d {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 813.063839] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32c4aa37-3f1c-4777-8519-34888b2069bd tempest-FloatingIPsAssociationTestJSON-1944781937 tempest-FloatingIPsAssociationTestJSON-1944781937-project-member] Lock "dbf48807-08a7-46d1-8454-42437a9f87c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.991s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.092109] env[63538]: DEBUG nova.network.neutron [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 813.165198] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520d48d1-db14-f2f9-9a1f-e4a417becc35, 'name': SearchDatastore_Task, 'duration_secs': 0.028503} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.165535] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.165777] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 813.166039] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.166207] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.166401] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 813.167008] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b2ae68e-1060-4f21-86e1-c79a83f77244 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.181308] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 813.181624] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 813.182376] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64d13611-dfcc-404c-97ff-d7f4fe53b0b2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.188851] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 813.188851] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52514775-18ef-612c-1a82-a35feb1bca10" [ 813.188851] env[63538]: _type = "Task" [ 813.188851] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.198291] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52514775-18ef-612c-1a82-a35feb1bca10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.269610] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 813.269936] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-996b268e-7064-446f-95eb-02673664ecbc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.278124] env[63538]: DEBUG oslo_vmware.api [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 813.278124] env[63538]: value = "task-5100970" [ 813.278124] env[63538]: _type = "Task" [ 813.278124] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.290342] env[63538]: DEBUG oslo_vmware.api [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100961, 'name': PowerOnVM_Task, 'duration_secs': 1.75239} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.295975] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 813.296244] env[63538]: INFO nova.compute.manager [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Took 10.34 seconds to spawn the instance on the hypervisor. 
[ 813.296442] env[63538]: DEBUG nova.compute.manager [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 813.296750] env[63538]: DEBUG oslo_vmware.api [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100970, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.297602] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41adea4c-1f50-4bcd-8c6e-0cc3f54565a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.340404] env[63538]: DEBUG oslo_concurrency.lockutils [req-1934602b-63fd-4d97-bc34-81a2f874f26e req-7fc21c4d-92c6-4981-90db-45f6e9b77582 service nova] Releasing lock "refresh_cache-de68a921-bf67-4794-923d-4e062d8ff802" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.389350] env[63538]: DEBUG nova.network.neutron [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Updating instance_info_cache with network_info: [{"id": "177a8c57-989c-48d3-bf05-40bead2e9b7f", "address": "fa:16:3e:a7:77:52", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap177a8c57-98", "ovs_interfaceid": "177a8c57-989c-48d3-bf05-40bead2e9b7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.406200] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100968, 'name': ReconfigVM_Task, 'duration_secs': 0.672324} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.407471] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 87f8bb3e-6f32-4850-ac54-efad0befb268/87f8bb3e-6f32-4850-ac54-efad0befb268.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 813.408642] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4327eb95-bf63-4a4e-8771-a4a5dd7c003a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.413968] env[63538]: DEBUG oslo_vmware.api [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Task: {'id': task-5100969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.520888} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.414596] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 813.414813] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 813.414977] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 813.415299] env[63538]: INFO nova.compute.manager [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Took 1.65 seconds to destroy the instance on the hypervisor. [ 813.415677] env[63538]: DEBUG oslo.service.loopingcall [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 813.415957] env[63538]: DEBUG nova.compute.manager [-] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 813.416117] env[63538]: DEBUG nova.network.neutron [-] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 813.421159] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 813.421159] env[63538]: value = "task-5100971" [ 813.421159] env[63538]: _type = "Task" [ 813.421159] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.433953] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100971, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.534223] env[63538]: INFO nova.compute.claims [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 813.553343] env[63538]: INFO nova.compute.manager [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Took 59.22 seconds to build instance. [ 813.700279] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52514775-18ef-612c-1a82-a35feb1bca10, 'name': SearchDatastore_Task, 'duration_secs': 0.054364} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.701029] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5b1d489-169e-4fd3-8ab6-8dd32b43d8e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.709424] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 813.709424] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cf5022-93be-11f9-2ace-abf25c93dedb" [ 813.709424] env[63538]: _type = "Task" [ 813.709424] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.719116] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cf5022-93be-11f9-2ace-abf25c93dedb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.790026] env[63538]: DEBUG oslo_vmware.api [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100970, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.827237] env[63538]: INFO nova.compute.manager [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Took 52.27 seconds to build instance. [ 813.892423] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "refresh_cache-49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.893131] env[63538]: DEBUG nova.compute.manager [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Instance network_info: |[{"id": "177a8c57-989c-48d3-bf05-40bead2e9b7f", "address": "fa:16:3e:a7:77:52", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap177a8c57-98", "ovs_interfaceid": "177a8c57-989c-48d3-bf05-40bead2e9b7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 813.893909] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:77:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '177a8c57-989c-48d3-bf05-40bead2e9b7f', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 813.903989] env[63538]: DEBUG oslo.service.loopingcall [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 813.904480] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 813.904958] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-287a7988-8999-4d90-be58-7fbc236eda9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.935745] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100971, 'name': Rename_Task, 'duration_secs': 0.199804} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.938276] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 813.938598] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 813.938598] env[63538]: value = "task-5100972" [ 813.938598] env[63538]: _type = "Task" [ 813.938598] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.938823] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e62540b8-72da-4e11-8b5a-a08156ec1ff4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.950090] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100972, 'name': CreateVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.952074] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 813.952074] env[63538]: value = "task-5100973" [ 813.952074] env[63538]: _type = "Task" [ 813.952074] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.964497] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100973, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.043874] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "6850191a-4190-4795-ae18-830b41a76085" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.056701] env[63538]: DEBUG oslo_concurrency.lockutils [None req-69122fe0-108a-4f37-8c16-c0641ca3e840 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "6850191a-4190-4795-ae18-830b41a76085" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.322s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.058567] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "6850191a-4190-4795-ae18-830b41a76085" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.015s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.058817] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "6850191a-4190-4795-ae18-830b41a76085-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.059047] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "6850191a-4190-4795-ae18-830b41a76085-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.059232] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "6850191a-4190-4795-ae18-830b41a76085-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.061479] env[63538]: INFO nova.compute.manager [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Terminating instance [ 814.068993] env[63538]: DEBUG nova.compute.manager [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 814.073445] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 814.074569] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d669ac-d571-4d49-b707-a9f281355be9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.089396] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 814.089764] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47b12533-06e2-400a-9ec6-7c5d294e7ad9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.098272] env[63538]: DEBUG oslo_vmware.api [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 814.098272] env[63538]: value = "task-5100974" [ 814.098272] env[63538]: _type = "Task" [ 814.098272] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.113214] env[63538]: DEBUG oslo_vmware.api [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100974, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.222806] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cf5022-93be-11f9-2ace-abf25c93dedb, 'name': SearchDatastore_Task, 'duration_secs': 0.010977} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.222969] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.223259] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] de68a921-bf67-4794-923d-4e062d8ff802/de68a921-bf67-4794-923d-4e062d8ff802.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 814.223942] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cff0427-bb2f-46d8-8efe-deb844278cee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.233269] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 814.233269] env[63538]: value = "task-5100975" [ 814.233269] env[63538]: _type = "Task" [ 814.233269] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.247260] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100975, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.292282] env[63538]: DEBUG oslo_vmware.api [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5100970, 'name': PowerOnVM_Task, 'duration_secs': 0.618633} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.292282] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 814.292282] env[63538]: DEBUG nova.compute.manager [None req-4b21ab62-fd3b-4d94-9e81-2a4ea355a919 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 814.292819] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06ed6f2-ebda-4aed-8619-b4df3926fe7b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.329763] env[63538]: DEBUG oslo_concurrency.lockutils [None req-db17450d-a11f-4e43-9eea-d899bb23ab76 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "79f4cdd9-219a-4440-9dd2-9b2a360965b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.140s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.439197] env[63538]: DEBUG nova.compute.manager [req-f635bfc3-3678-4298-9f84-eda76c764ccc req-6f80966e-1700-4785-b630-14346092a4ed service nova] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Received event network-vif-deleted-90b87b42-1aac-4697-91e2-84193dd93b89 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 814.439456] env[63538]: INFO nova.compute.manager [req-f635bfc3-3678-4298-9f84-eda76c764ccc req-6f80966e-1700-4785-b630-14346092a4ed service nova] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Neutron deleted interface 90b87b42-1aac-4697-91e2-84193dd93b89; detaching it from the instance and deleting it from the info cache [ 814.439651] env[63538]: DEBUG nova.network.neutron [req-f635bfc3-3678-4298-9f84-eda76c764ccc req-6f80966e-1700-4785-b630-14346092a4ed service nova] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.455702] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100972, 'name': CreateVM_Task, 'duration_secs': 0.417795} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.458214] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 814.459602] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.459846] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.460144] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 814.461213] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-697c22f4-49a7-418e-9e5d-c8902de0fd05 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.469536] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100973, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.474235] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 814.474235] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f0727c-7abe-5f70-5e2a-289a97125824" [ 814.474235] env[63538]: _type = "Task" [ 814.474235] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.490016] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f0727c-7abe-5f70-5e2a-289a97125824, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.497666] env[63538]: DEBUG nova.network.neutron [-] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.525155] env[63538]: DEBUG nova.compute.manager [req-c1852aeb-1b61-410f-9feb-1e6eb951eeb9 req-05d9d639-cbdd-4cf6-800a-dc4578034ef6 service nova] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Received event network-changed-177a8c57-989c-48d3-bf05-40bead2e9b7f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 814.525842] env[63538]: DEBUG nova.compute.manager [req-c1852aeb-1b61-410f-9feb-1e6eb951eeb9 req-05d9d639-cbdd-4cf6-800a-dc4578034ef6 service nova] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Refreshing instance network info cache due to event network-changed-177a8c57-989c-48d3-bf05-40bead2e9b7f. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 814.525842] env[63538]: DEBUG oslo_concurrency.lockutils [req-c1852aeb-1b61-410f-9feb-1e6eb951eeb9 req-05d9d639-cbdd-4cf6-800a-dc4578034ef6 service nova] Acquiring lock "refresh_cache-49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.525842] env[63538]: DEBUG oslo_concurrency.lockutils [req-c1852aeb-1b61-410f-9feb-1e6eb951eeb9 req-05d9d639-cbdd-4cf6-800a-dc4578034ef6 service nova] Acquired lock "refresh_cache-49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.526664] env[63538]: DEBUG nova.network.neutron [req-c1852aeb-1b61-410f-9feb-1e6eb951eeb9 req-05d9d639-cbdd-4cf6-800a-dc4578034ef6 service nova] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Refreshing network info cache for port 177a8c57-989c-48d3-bf05-40bead2e9b7f {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 814.565017] env[63538]: DEBUG nova.compute.manager [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 814.611131] env[63538]: DEBUG oslo_vmware.api [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100974, 'name': PowerOffVM_Task, 'duration_secs': 0.215552} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.615473] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 814.615722] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 814.616274] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f75325f-edd4-45ef-8a88-c2b4f33f4508 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.747806] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100975, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.789436] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 814.789436] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 814.789436] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Deleting the datastore file [datastore1] 6850191a-4190-4795-ae18-830b41a76085 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 814.789436] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc7fe5ee-a06a-4095-a647-2587ca7ec831 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.804809] env[63538]: DEBUG oslo_vmware.api [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 814.804809] env[63538]: value = "task-5100977" [ 814.804809] env[63538]: _type = "Task" [ 814.804809] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.824457] env[63538]: DEBUG oslo_vmware.api [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100977, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.943362] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf13b72e-c522-4703-a2d9-af91c32ea316 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.955038] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7e4b4e-1b75-4f2d-9fda-f1b3ed6c958b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.989160] env[63538]: DEBUG oslo_vmware.api [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100973, 'name': PowerOnVM_Task, 'duration_secs': 0.600929} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.996553] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 814.996852] env[63538]: INFO nova.compute.manager [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Took 9.21 seconds to spawn the instance on the hypervisor. [ 814.997107] env[63538]: DEBUG nova.compute.manager [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 814.997444] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f0727c-7abe-5f70-5e2a-289a97125824, 'name': SearchDatastore_Task, 'duration_secs': 0.017596} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.012626] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496c4e40-4be8-4c97-a63b-5f8d6f5333ba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.015784] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.016082] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 815.016373] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.016551] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.016763] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 815.017254] env[63538]: INFO nova.compute.manager [-] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Took 1.60 seconds to deallocate network for instance. [ 815.017638] env[63538]: DEBUG nova.compute.manager [req-f635bfc3-3678-4298-9f84-eda76c764ccc req-6f80966e-1700-4785-b630-14346092a4ed service nova] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Detach interface failed, port_id=90b87b42-1aac-4697-91e2-84193dd93b89, reason: Instance 5bf7ed57-62d5-4abc-96d8-78b979baed92 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 815.020829] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47941dc3-5e1a-4cb3-b038-b9659255a91c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.041193] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 815.041193] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 815.042570] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-222cdf81-0d9e-4dc0-8700-4c20b7127219 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.052390] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 815.052390] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a7c2f0-f5bd-d87c-17af-bd4776ba0a94" [ 815.052390] env[63538]: _type = "Task" [ 815.052390] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.062746] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a7c2f0-f5bd-d87c-17af-bd4776ba0a94, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.099498] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.175864] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "79f4cdd9-219a-4440-9dd2-9b2a360965b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.176183] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "79f4cdd9-219a-4440-9dd2-9b2a360965b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.176414] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "79f4cdd9-219a-4440-9dd2-9b2a360965b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.176626] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "79f4cdd9-219a-4440-9dd2-9b2a360965b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.176818] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "79f4cdd9-219a-4440-9dd2-9b2a360965b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.181592] env[63538]: INFO nova.compute.manager [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Terminating instance [ 815.184287] env[63538]: DEBUG nova.compute.manager [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 815.184512] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 815.185783] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5582da50-0797-4017-9053-7a195e51f719 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.195403] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 815.196841] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-630aab7f-c42b-48ce-bc89-a420c514b96c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.199556] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92724df-90f0-4de7-ac44-1af917e36f06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.211424] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb74601-fb93-4f9d-9935-0a6adcdf2ee8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.216177] env[63538]: DEBUG oslo_vmware.api [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 815.216177] env[63538]: value = "task-5100978" [ 815.216177] env[63538]: _type = "Task" [ 815.216177] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.253170] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2fdb17-6217-4696-9c44-78fd360c90a3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.259592] env[63538]: DEBUG oslo_vmware.api [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100978, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.269823] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01011ac4-20c4-4189-bbfe-c7de1c2331bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.273801] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100975, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619358} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.274155] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] de68a921-bf67-4794-923d-4e062d8ff802/de68a921-bf67-4794-923d-4e062d8ff802.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 815.274406] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 815.275116] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-694b0693-8258-49bd-a3ed-72189d5d9003 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.285903] env[63538]: DEBUG nova.compute.provider_tree [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.294211] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 815.294211] env[63538]: value = "task-5100979" [ 815.294211] env[63538]: _type = "Task" [ 815.294211] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.303900] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100979, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.315790] env[63538]: DEBUG oslo_vmware.api [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5100977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23319} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.316118] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 815.316327] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 815.316514] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 815.316696] env[63538]: INFO nova.compute.manager [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: 6850191a-4190-4795-ae18-830b41a76085] Took 1.24 seconds to destroy the instance on the hypervisor. [ 815.317224] env[63538]: DEBUG oslo.service.loopingcall [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 815.317224] env[63538]: DEBUG nova.compute.manager [-] [instance: 6850191a-4190-4795-ae18-830b41a76085] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 815.317324] env[63538]: DEBUG nova.network.neutron [-] [instance: 6850191a-4190-4795-ae18-830b41a76085] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 815.539549] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.566607] env[63538]: INFO nova.compute.manager [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Took 52.92 seconds to build instance. [ 815.573416] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a7c2f0-f5bd-d87c-17af-bd4776ba0a94, 'name': SearchDatastore_Task, 'duration_secs': 0.014913} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.573416] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3733a2c9-4bb1-4a5b-9306-cd1368c66c4a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.579425] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 815.579425] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520370bf-17b4-212e-a2b2-80fb2b82d231" [ 815.579425] env[63538]: _type = "Task" [ 815.579425] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.589272] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520370bf-17b4-212e-a2b2-80fb2b82d231, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.730908] env[63538]: DEBUG oslo_vmware.api [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100978, 'name': PowerOffVM_Task, 'duration_secs': 0.360067} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.731271] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 815.731454] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 815.731951] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af145c14-4189-4914-8593-e27471e15f0a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.789278] env[63538]: DEBUG nova.scheduler.client.report [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 815.802691] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-32eb103f-85e6-44cb-9eaf-53fad514778a 
tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 815.802911] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 815.803114] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleting the datastore file [datastore1] 79f4cdd9-219a-4440-9dd2-9b2a360965b1 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 815.803385] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-268e9058-6375-4311-92c4-0ed3ac9cf295 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.810077] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100979, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.196787} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.811639] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 815.813546] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44890def-8d64-4e34-bc23-a042c78785b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.817979] env[63538]: DEBUG oslo_vmware.api [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 815.817979] env[63538]: value = "task-5100981" [ 815.817979] env[63538]: _type = "Task" [ 815.817979] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.818824] env[63538]: DEBUG nova.network.neutron [req-c1852aeb-1b61-410f-9feb-1e6eb951eeb9 req-05d9d639-cbdd-4cf6-800a-dc4578034ef6 service nova] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Updated VIF entry in instance network info cache for port 177a8c57-989c-48d3-bf05-40bead2e9b7f. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 815.819212] env[63538]: DEBUG nova.network.neutron [req-c1852aeb-1b61-410f-9feb-1e6eb951eeb9 req-05d9d639-cbdd-4cf6-800a-dc4578034ef6 service nova] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Updating instance_info_cache with network_info: [{"id": "177a8c57-989c-48d3-bf05-40bead2e9b7f", "address": "fa:16:3e:a7:77:52", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap177a8c57-98", "ovs_interfaceid": "177a8c57-989c-48d3-bf05-40bead2e9b7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.860180] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] de68a921-bf67-4794-923d-4e062d8ff802/de68a921-bf67-4794-923d-4e062d8ff802.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 815.860180] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-834bfda7-e986-4812-8e63-30c83e54dc58 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.883509] env[63538]: DEBUG oslo_vmware.api [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100981, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.890908] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 815.890908] env[63538]: value = "task-5100982" [ 815.890908] env[63538]: _type = "Task" [ 815.890908] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.902492] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100982, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.068734] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3a2af326-f6c9-4906-8809-b850d3950c16 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "87f8bb3e-6f32-4850-ac54-efad0befb268" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.926s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.094460] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520370bf-17b4-212e-a2b2-80fb2b82d231, 'name': SearchDatastore_Task, 'duration_secs': 0.022271} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.094460] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.094460] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1/49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 816.094460] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a261159-d973-46d8-a4a8-e3665303fd49 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.104391] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 816.104391] env[63538]: value = "task-5100983" [ 816.104391] env[63538]: _type = "Task" [ 816.104391] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.114758] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100983, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.296078] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.775s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.300619] env[63538]: DEBUG oslo_concurrency.lockutils [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 43.047s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.300619] env[63538]: DEBUG nova.objects.instance [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63538) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 816.324748] env[63538]: DEBUG oslo_concurrency.lockutils [req-c1852aeb-1b61-410f-9feb-1e6eb951eeb9 req-05d9d639-cbdd-4cf6-800a-dc4578034ef6 service nova] Releasing lock "refresh_cache-49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.336303] env[63538]: DEBUG oslo_vmware.api [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5100981, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217157} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.336639] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 816.336851] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 816.337081] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 816.337326] env[63538]: INFO nova.compute.manager [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Took 1.15 seconds to destroy the instance on the hypervisor. 
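The lockutils entries throughout this section follow one lifecycle per instance UUID: the compute manager acquires an instance-named lock before terminate_instance or build_and_run_instance, logs how long it waited, does the work, and logs how long the lock was held on release. The snippet below is a simplified, hypothetical stand-in for that pattern in plain Python; it is not the oslo.concurrency implementation, and instance_lock and the printed wording are invented for illustration only.

import threading
import time
from contextlib import contextmanager

_locks = {}
_locks_guard = threading.Lock()

@contextmanager
def instance_lock(name):
    # Acquire a process-local lock keyed by instance UUID, reporting
    # waited/held durations the way the lockutils entries above do.
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired :: waited {time.monotonic() - start:.3f}s')
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released :: held {time.monotonic() - held_from:.3f}s')

def do_terminate_instance(uuid):
    # Serialize destroy per instance, mirroring the
    # terminate_instance / do_terminate_instance entries above.
    with instance_lock(uuid):
        print(f'[instance: {uuid}] Terminating instance')
        # ... power off, unregister, delete datastore files ...

do_terminate_instance("6850191a-4190-4795-ae18-830b41a76085")

In the service itself these durations surface as the "waited N.NNNs" and "held N.NNNs" fields on the acquire and release lines.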
[ 816.337537] env[63538]: DEBUG oslo.service.loopingcall [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 816.337756] env[63538]: DEBUG nova.compute.manager [-] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 816.337851] env[63538]: DEBUG nova.network.neutron [-] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 816.364599] env[63538]: INFO nova.network.neutron [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Updating port a5dfe48b-4acc-472c-8e00-f936b4068ea5 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 816.405272] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100982, 'name': ReconfigVM_Task, 'duration_secs': 0.307478} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.405599] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Reconfigured VM instance instance-0000003a to attach disk [datastore1] de68a921-bf67-4794-923d-4e062d8ff802/de68a921-bf67-4794-923d-4e062d8ff802.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 816.406248] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d3269ec-7a79-4020-a576-db5800ad2659 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.414316] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 816.414316] env[63538]: value = "task-5100984" [ 816.414316] env[63538]: _type = "Task" [ 816.414316] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.425586] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100984, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.616543] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100983, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.691023] env[63538]: DEBUG nova.network.neutron [-] [instance: 6850191a-4190-4795-ae18-830b41a76085] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.760451] env[63538]: DEBUG nova.compute.manager [req-9e2716bf-56db-4a34-8842-9676a23be0a9 req-7b9239fb-8495-4bf3-8def-06587a64000b service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Received event network-vif-deleted-377fafa3-5b12-4619-8d84-bf0b09188cd6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 816.760701] env[63538]: DEBUG nova.compute.manager [req-9e2716bf-56db-4a34-8842-9676a23be0a9 req-7b9239fb-8495-4bf3-8def-06587a64000b service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Received event network-vif-deleted-3700c423-faa4-4788-a282-65acd1bbfe9d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 816.760898] env[63538]: DEBUG nova.compute.manager [req-9e2716bf-56db-4a34-8842-9676a23be0a9 req-7b9239fb-8495-4bf3-8def-06587a64000b service nova] [instance: 6850191a-4190-4795-ae18-830b41a76085] Received event network-vif-deleted-831b08d0-41bf-439d-8e03-090fc08a5815 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 816.908469] env[63538]: DEBUG nova.compute.manager [req-e7c4bdff-5bc1-49e3-b50d-624057973ea4 req-a08ec373-e32d-4c4c-8860-d726f8a0acd3 service nova] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Received event network-vif-deleted-384175ce-d150-4f04-ad8f-d65790b79e5e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 816.908737] env[63538]: INFO nova.compute.manager [req-e7c4bdff-5bc1-49e3-b50d-624057973ea4 req-a08ec373-e32d-4c4c-8860-d726f8a0acd3 service nova] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Neutron deleted interface 384175ce-d150-4f04-ad8f-d65790b79e5e; detaching it from the instance and deleting it from the info cache [ 816.908962] env[63538]: DEBUG nova.network.neutron [req-e7c4bdff-5bc1-49e3-b50d-624057973ea4 req-a08ec373-e32d-4c4c-8860-d726f8a0acd3 service nova] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.926332] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100984, 'name': Rename_Task, 'duration_secs': 0.156724} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.926865] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 816.927185] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c661c0a-544c-4eda-bf22-55a7a516f9a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.936631] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 816.936631] env[63538]: value = "task-5100985" [ 816.936631] env[63538]: _type = "Task" [ 816.936631] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.952885] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100985, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.123526] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100983, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.808295} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.124508] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1/49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 817.124824] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 817.125276] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7aa713f-e9b5-4122-ac52-85fa16f96bff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.137211] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 817.137211] env[63538]: value = "task-5100986" [ 817.137211] env[63538]: _type = "Task" [ 817.137211] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.148000] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100986, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.193327] env[63538]: INFO nova.compute.manager [-] [instance: 6850191a-4190-4795-ae18-830b41a76085] Took 1.88 seconds to deallocate network for instance. [ 817.217112] env[63538]: DEBUG nova.network.neutron [-] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.313882] env[63538]: DEBUG oslo_concurrency.lockutils [None req-028bb863-ca42-4b59-ab54-c6944ac1140a tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.315643] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.767s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.317419] env[63538]: DEBUG nova.objects.instance [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lazy-loading 'resources' on Instance uuid db5993ce-6982-4b82-8f5d-3fe51df8896b {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 817.414758] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cfa5998e-bfee-40f1-bac3-8a8f23776eed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.424912] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84afe2d-9765-4a66-a854-bb786522dabd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.458635] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100985, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.471981] env[63538]: DEBUG nova.compute.manager [req-e7c4bdff-5bc1-49e3-b50d-624057973ea4 req-a08ec373-e32d-4c4c-8860-d726f8a0acd3 service nova] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Detach interface failed, port_id=384175ce-d150-4f04-ad8f-d65790b79e5e, reason: Instance 79f4cdd9-219a-4440-9dd2-9b2a360965b1 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 817.650468] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100986, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088769} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.650783] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 817.651688] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a8f4ac-f6b1-45d0-bcf9-d147b70360e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.679287] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1/49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 817.679608] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8af5cd4-f52b-4984-aacc-f0518fb27417 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.704962] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.705424] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 817.705424] env[63538]: value = "task-5100987" [ 817.705424] env[63538]: _type = "Task" [ 817.705424] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.717859] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100987, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.720551] env[63538]: INFO nova.compute.manager [-] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Took 1.38 seconds to deallocate network for instance. 
[ 817.954140] env[63538]: DEBUG oslo_vmware.api [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100985, 'name': PowerOnVM_Task, 'duration_secs': 0.805532} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.954624] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 817.954970] env[63538]: INFO nova.compute.manager [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Took 9.51 seconds to spawn the instance on the hypervisor. [ 817.955334] env[63538]: DEBUG nova.compute.manager [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 817.956386] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcd0c0b-a2a1-4e51-a44f-3c0b0e62c7f3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.218438] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100987, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.227796] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.401118] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.401361] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.401581] env[63538]: DEBUG nova.network.neutron [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 818.476262] env[63538]: INFO nova.compute.manager [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Took 50.40 seconds to build instance. 
[ 818.535078] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb8ab79-5e08-4f35-be76-61310ab7daa8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.546842] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf597f3-2119-41c1-8179-f220d6ebc4f8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.583770] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ac0960-61de-48d2-8d5c-4265b8c73cf1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.599445] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da4c283-ea12-4597-b34c-dc6a66231af7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.620571] env[63538]: DEBUG nova.compute.provider_tree [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.718015] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100987, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.891344] env[63538]: DEBUG nova.compute.manager [req-70241be9-35f9-43ed-b875-3aa8056de11f req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Received event network-vif-plugged-a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 818.891344] env[63538]: DEBUG oslo_concurrency.lockutils [req-70241be9-35f9-43ed-b875-3aa8056de11f req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] Acquiring lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.891344] env[63538]: DEBUG oslo_concurrency.lockutils [req-70241be9-35f9-43ed-b875-3aa8056de11f req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.891344] env[63538]: DEBUG oslo_concurrency.lockutils [req-70241be9-35f9-43ed-b875-3aa8056de11f req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.891344] env[63538]: DEBUG nova.compute.manager [req-70241be9-35f9-43ed-b875-3aa8056de11f 
req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] No waiting events found dispatching network-vif-plugged-a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 818.892247] env[63538]: WARNING nova.compute.manager [req-70241be9-35f9-43ed-b875-3aa8056de11f req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Received unexpected event network-vif-plugged-a5dfe48b-4acc-472c-8e00-f936b4068ea5 for instance with vm_state shelved_offloaded and task_state spawning. [ 818.892897] env[63538]: DEBUG nova.compute.manager [req-70241be9-35f9-43ed-b875-3aa8056de11f req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Received event network-changed-a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 818.893244] env[63538]: DEBUG nova.compute.manager [req-70241be9-35f9-43ed-b875-3aa8056de11f req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Refreshing instance network info cache due to event network-changed-a5dfe48b-4acc-472c-8e00-f936b4068ea5. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 818.893581] env[63538]: DEBUG oslo_concurrency.lockutils [req-70241be9-35f9-43ed-b875-3aa8056de11f req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] Acquiring lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.977827] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9815d433-6301-4704-aaba-26411f71842a tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "de68a921-bf67-4794-923d-4e062d8ff802" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.250s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.093910] env[63538]: INFO nova.compute.manager [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Rescuing [ 819.094254] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "refresh_cache-de68a921-bf67-4794-923d-4e062d8ff802" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.095709] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "refresh_cache-de68a921-bf67-4794-923d-4e062d8ff802" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.095709] env[63538]: DEBUG nova.network.neutron [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Building network info cache for instance {{(pid=63538) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 819.122568] env[63538]: DEBUG nova.scheduler.client.report [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 819.222061] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100987, 'name': ReconfigVM_Task, 'duration_secs': 1.137217} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.222440] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1/49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 819.223212] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10c55051-d193-47fd-aceb-ac8aca9ac1d0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.232140] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 819.232140] env[63538]: value = "task-5100988" [ 819.232140] env[63538]: _type = "Task" [ 819.232140] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.243436] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100988, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.354805] env[63538]: DEBUG nova.network.neutron [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Updating instance_info_cache with network_info: [{"id": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "address": "fa:16:3e:a9:56:1a", "network": {"id": "4fd42fb1-fcc5-467d-88b7-1a5ab3297631", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-9303923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c5e6ed681ed4078bd9115b30f419d9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5dfe48b-4a", "ovs_interfaceid": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.631386] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.315s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.636667] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.594s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.671997] env[63538]: INFO nova.scheduler.client.report [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Deleted allocations for instance db5993ce-6982-4b82-8f5d-3fe51df8896b [ 819.744643] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100988, 'name': Rename_Task, 'duration_secs': 0.15227} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.747861] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 819.748405] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b44d52c0-eb95-44b6-9d15-1dec2d7cea19 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.757800] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 819.757800] env[63538]: value = "task-5100989" [ 819.757800] env[63538]: _type = "Task" [ 819.757800] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.769947] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100989, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.858573] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Releasing lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.863140] env[63538]: DEBUG oslo_concurrency.lockutils [req-70241be9-35f9-43ed-b875-3aa8056de11f req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] Acquired lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.863140] env[63538]: DEBUG nova.network.neutron [req-70241be9-35f9-43ed-b875-3aa8056de11f req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Refreshing network info cache for port a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 819.912254] env[63538]: DEBUG nova.virt.hardware [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='c0ca35a4969b70e6c826c8af98f74c9f',container_format='bare',created_at=2025-12-12T12:53:04Z,direct_url=,disk_format='vmdk',id=385b766b-e27c-4c97-87a2-473b5485f688,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-724033568-shelved',owner='6c5e6ed681ed4078bd9115b30f419d9a',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2025-12-12T12:53:19Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 819.912562] env[63538]: DEBUG nova.virt.hardware [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 819.912854] env[63538]: DEBUG nova.virt.hardware [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 819.912930] env[63538]: DEBUG nova.virt.hardware [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 819.913487] env[63538]: DEBUG nova.virt.hardware [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 819.913839] env[63538]: DEBUG nova.virt.hardware [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 819.914804] env[63538]: DEBUG nova.virt.hardware [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 819.914804] env[63538]: DEBUG nova.virt.hardware [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 819.914804] env[63538]: DEBUG nova.virt.hardware [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 819.915242] env[63538]: DEBUG nova.virt.hardware [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 819.915446] 
env[63538]: DEBUG nova.virt.hardware [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 819.916462] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a174dc60-609c-4f3d-a750-d557537ed32d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.931521] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b8fea9-1a0a-46d5-b9a8-ba8b0345be75 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.951758] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:56:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b399c74-1411-408a-b4cd-84e268ae83fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a5dfe48b-4acc-472c-8e00-f936b4068ea5', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 819.960132] env[63538]: DEBUG oslo.service.loopingcall [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 819.966166] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 819.967030] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee6445e9-f28c-48ca-b53c-f2e9d5acb230 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.991706] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 819.991706] env[63538]: value = "task-5100990" [ 819.991706] env[63538]: _type = "Task" [ 819.991706] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.001646] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100990, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.177861] env[63538]: DEBUG nova.network.neutron [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Updating instance_info_cache with network_info: [{"id": "ca7d9759-5177-4ea2-a411-3bae4181182d", "address": "fa:16:3e:67:5a:a6", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca7d9759-51", "ovs_interfaceid": "ca7d9759-5177-4ea2-a411-3bae4181182d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.188205] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ee2f82b-2e2d-42e9-91d1-c52af1b9f860 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "db5993ce-6982-4b82-8f5d-3fe51df8896b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.736s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.241477] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a7f42a-a4f6-437d-9828-ada34449c320 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.251664] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8398863a-6ee2-445e-a237-be407a5d99aa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.293874] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244a12f8-40b4-475d-bcd6-5e3d37d6c191 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.304518] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100989, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.308343] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5068bfc1-2efc-4636-94e2-1cbe3cce08ef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.328426] env[63538]: DEBUG nova.compute.provider_tree [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.509574] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100990, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.663702] env[63538]: DEBUG nova.network.neutron [req-70241be9-35f9-43ed-b875-3aa8056de11f req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Updated VIF entry in instance network info cache for port a5dfe48b-4acc-472c-8e00-f936b4068ea5. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 820.663871] env[63538]: DEBUG nova.network.neutron [req-70241be9-35f9-43ed-b875-3aa8056de11f req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Updating instance_info_cache with network_info: [{"id": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "address": "fa:16:3e:a9:56:1a", "network": {"id": "4fd42fb1-fcc5-467d-88b7-1a5ab3297631", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-9303923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c5e6ed681ed4078bd9115b30f419d9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5dfe48b-4a", "ovs_interfaceid": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.684232] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "refresh_cache-de68a921-bf67-4794-923d-4e062d8ff802" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.773334] env[63538]: DEBUG oslo_vmware.api [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5100989, 'name': PowerOnVM_Task, 'duration_secs': 0.617584} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.773648] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 820.773942] env[63538]: INFO nova.compute.manager [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Took 9.51 seconds to spawn the instance on the hypervisor. [ 820.774386] env[63538]: DEBUG nova.compute.manager [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 820.775480] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1bc0387-d645-4220-bc2f-ea81b5dbc13f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.836535] env[63538]: DEBUG nova.scheduler.client.report [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 821.005696] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100990, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.168856] env[63538]: DEBUG oslo_concurrency.lockutils [req-70241be9-35f9-43ed-b875-3aa8056de11f req-1db6db3a-59b1-4caf-993c-e73ad96d7361 service nova] Releasing lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.227263] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 821.227623] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e60bc1a3-c5d5-40a8-9ad1-757fbfb0b6dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.238382] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 821.238382] env[63538]: value = "task-5100991" [ 821.238382] env[63538]: _type = "Task" [ 821.238382] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.249971] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100991, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.309118] env[63538]: INFO nova.compute.manager [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Took 51.42 seconds to build instance. [ 821.347214] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.711s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.347655] env[63538]: INFO nova.compute.manager [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Successfully reverted task state from image_uploading on failure for instance. 
[ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server [None req-74c35ba7-bb44-4c52-9492-f93d7e7abe56 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Exception during message handling: oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-992349' has already been deleted or has not been completely created [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server raise self.value [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server raise self.value [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, 
in decorated_function [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server raise self.value [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 233, in decorated_function [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server raise self.value [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 230, in decorated_function [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server return function(self, context, image_id, instance, [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4443, in snapshot_instance [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server self._snapshot_instance(context, image_id, instance, [ 821.358097] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4476, in _snapshot_instance [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server self.driver.snapshot(context, instance, image_id, [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 565, in snapshot [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server self._vmops.snapshot(context, instance, image_id, update_task_state) [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1034, in snapshot [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server self._delete_vm_snapshot(instance, vm_ref, snapshot_ref) [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/decorator.py", line 232, in fun [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server return caller(func, *(extras + args), **kw) [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 124, in retry_if_task_in_progress [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server f(*args, **kwargs) [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 937, in _delete_vm_snapshot [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server self._session._wait_for_task(delete_snapshot_task) [ 
821.360734] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server return self.wait_for_task(task_ref) [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server return evt.wait() [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server self.f(*self.args, **self.kw) [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server raise exceptions.translate_fault(task_info.error) [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-992349' has already been deleted or has not been completely created [ 821.360734] env[63538]: ERROR oslo_messaging.rpc.server [ 821.360734] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.872s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.360734] env[63538]: DEBUG nova.objects.instance [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lazy-loading 'resources' on Instance uuid 04dc612b-7987-405b-9716-95c4ff3535ec {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 821.506875] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100990, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.747351] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100991, 'name': PowerOffVM_Task, 'duration_secs': 0.223744} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.747637] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 821.748436] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a809011-4e72-4308-9467-ca920a955479 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.768405] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f09c011-fde4-49d4-9395-fa2199a09702 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.810945] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 821.811508] env[63538]: DEBUG oslo_concurrency.lockutils [None req-84fe4b88-066a-4d80-8c38-c969698fc1b8 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.254s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.811738] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84baa656-997c-472d-908a-2054a9bde293 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.823021] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 821.823021] env[63538]: value = "task-5100992" [ 821.823021] env[63538]: _type = "Task" [ 821.823021] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.829038] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100992, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.006880] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100990, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.032775] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "e79a9eeb-a4c4-4613-bc43-4e40103addf9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.033080] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "e79a9eeb-a4c4-4613-bc43-4e40103addf9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.334413] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 822.334413] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 822.334413] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.335081] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.335081] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 822.335081] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5dcffbd9-4fd5-499f-bb51-0546a646bab5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.347361] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 
tempest-ServerRescueNegativeTestJSON-405097464-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 822.347553] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 822.348296] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a633e4d3-fca6-4082-8b2a-4972c9d94cf1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.358382] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 822.358382] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e0671e-4a1e-bac8-9b37-053a62994609" [ 822.358382] env[63538]: _type = "Task" [ 822.358382] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.366864] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e0671e-4a1e-bac8-9b37-053a62994609, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.421598] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b03347-b08e-40f3-96eb-53f8df69f8e7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.430436] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2363c22e-9f6e-4364-97a8-bdfbf5b55be4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.462535] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72fdf86-87ef-466e-82b2-8e29158b2795 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.470977] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b9492c-bc64-4aa4-beca-125898f77b04 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.488039] env[63538]: DEBUG nova.compute.provider_tree [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.505621] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100990, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.536553] env[63538]: DEBUG nova.compute.manager [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 822.828193] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "edc670dd-732a-4c54-924c-c99ee539d4d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.828485] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "edc670dd-732a-4c54-924c-c99ee539d4d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.870865] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e0671e-4a1e-bac8-9b37-053a62994609, 'name': SearchDatastore_Task, 'duration_secs': 0.028137} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.872623] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfe5644b-ae15-4c82-87f5-eff3c572bdf2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.877856] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 822.877856] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c74da8-c1c1-f461-2435-1d27cafebc8c" [ 822.877856] env[63538]: _type = "Task" [ 822.877856] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.887477] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c74da8-c1c1-f461-2435-1d27cafebc8c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.995428] env[63538]: DEBUG nova.scheduler.client.report [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 823.009218] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5100990, 'name': CreateVM_Task, 'duration_secs': 3.008422} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.009499] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 823.010279] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/385b766b-e27c-4c97-87a2-473b5485f688" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.010394] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired lock "[datastore1] devstack-image-cache_base/385b766b-e27c-4c97-87a2-473b5485f688" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.010723] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/385b766b-e27c-4c97-87a2-473b5485f688" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 823.010999] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12f6d016-d8a7-4792-b0eb-23d888bd6e7d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.017634] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 823.017634] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d0f1f3-744a-6a6e-ad68-2043b3e5737e" [ 823.017634] env[63538]: _type = "Task" [ 823.017634] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.027650] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d0f1f3-744a-6a6e-ad68-2043b3e5737e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.058382] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.123798] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.123990] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.334054] env[63538]: DEBUG nova.compute.manager [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 823.389670] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c74da8-c1c1-f461-2435-1d27cafebc8c, 'name': SearchDatastore_Task, 'duration_secs': 0.012338} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.390167] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.390232] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] de68a921-bf67-4794-923d-4e062d8ff802/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk. {{(pid=63538) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 823.390509] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ee0de6d-35bd-4dee-af5a-83007cdcaccc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.400109] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 823.400109] env[63538]: value = "task-5100993" [ 823.400109] env[63538]: _type = "Task" [ 823.400109] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.408995] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100993, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.501338] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.143s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.503715] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.588s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.503950] env[63538]: DEBUG nova.objects.instance [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lazy-loading 'resources' on Instance uuid 707a79e2-f5db-479c-b719-1e040935cda3 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 823.523078] env[63538]: INFO nova.scheduler.client.report [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleted allocations for instance 04dc612b-7987-405b-9716-95c4ff3535ec [ 823.531962] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Releasing lock "[datastore1] devstack-image-cache_base/385b766b-e27c-4c97-87a2-473b5485f688" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.532251] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Processing image 385b766b-e27c-4c97-87a2-473b5485f688 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 823.532496] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/385b766b-e27c-4c97-87a2-473b5485f688/385b766b-e27c-4c97-87a2-473b5485f688.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.532649] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired lock "[datastore1] devstack-image-cache_base/385b766b-e27c-4c97-87a2-473b5485f688/385b766b-e27c-4c97-87a2-473b5485f688.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.532830] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 823.533426] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62bac42e-fea7-4ab2-a81f-2010edaf6ae6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.553336] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 823.553535] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 823.554402] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-963752bc-bfea-46c5-82b9-09181c18f79d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.562109] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 823.562109] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524d1a5f-35ec-1720-fd59-3927501cc0cb" [ 823.562109] env[63538]: _type = "Task" [ 823.562109] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.570555] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524d1a5f-35ec-1720-fd59-3927501cc0cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.627698] env[63538]: DEBUG nova.compute.utils [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 823.853788] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.911079] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100993, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.032135] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1379968-bf2d-4b16-9f3e-1275b16df108 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "04dc612b-7987-405b-9716-95c4ff3535ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.082s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.075295] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Preparing fetch location {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 824.075577] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Fetch image to [datastore1] OSTACK_IMG_b4585d7d-24f5-4c37-a020-5c07dcb72974/OSTACK_IMG_b4585d7d-24f5-4c37-a020-5c07dcb72974.vmdk {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 824.075769] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Downloading stream optimized image 385b766b-e27c-4c97-87a2-473b5485f688 to [datastore1] OSTACK_IMG_b4585d7d-24f5-4c37-a020-5c07dcb72974/OSTACK_IMG_b4585d7d-24f5-4c37-a020-5c07dcb72974.vmdk on the data store datastore1 as vApp {{(pid=63538) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 824.075945] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Downloading image file data 385b766b-e27c-4c97-87a2-473b5485f688 to the ESX as VM named 'OSTACK_IMG_b4585d7d-24f5-4c37-a020-5c07dcb72974' {{(pid=63538) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 824.130818] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.173972] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 824.173972] env[63538]: value = "resgroup-9" [ 824.173972] env[63538]: _type = "ResourcePool" [ 824.173972] env[63538]: }. 
{{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 824.174296] env[63538]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-7c50bec2-e7b4-458a-8997-f8d054465f5d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.203177] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lease: (returnval){ [ 824.203177] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52301634-95e2-00f5-4400-12dc936b61e7" [ 824.203177] env[63538]: _type = "HttpNfcLease" [ 824.203177] env[63538]: } obtained for vApp import into resource pool (val){ [ 824.203177] env[63538]: value = "resgroup-9" [ 824.203177] env[63538]: _type = "ResourcePool" [ 824.203177] env[63538]: }. {{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 824.203496] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the lease: (returnval){ [ 824.203496] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52301634-95e2-00f5-4400-12dc936b61e7" [ 824.203496] env[63538]: _type = "HttpNfcLease" [ 824.203496] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 824.215555] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 824.215555] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52301634-95e2-00f5-4400-12dc936b61e7" [ 824.215555] env[63538]: _type = "HttpNfcLease" [ 824.215555] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 824.415186] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100993, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.562301] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7634d085-3e92-4959-85c9-b2a78959f312 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.571515] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc2aa3e-07fb-4769-b1d3-ab32b169875c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.604614] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffcb2b2-00cf-4882-8c12-c74b7b8c929d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.613465] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3fc7e7-dcce-4223-a080-c403636eef48 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.630181] env[63538]: DEBUG nova.compute.provider_tree [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.714478] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 824.714478] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52301634-95e2-00f5-4400-12dc936b61e7" [ 824.714478] env[63538]: _type = "HttpNfcLease" [ 824.714478] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 824.915290] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100993, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.058319} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.915633] env[63538]: INFO nova.virt.vmwareapi.ds_util [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] de68a921-bf67-4794-923d-4e062d8ff802/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk. 
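The repeated "Task: {'id': ..., 'name': CopyVirtualDisk_Task} progress is N%" records above come from oslo.vmware's task polling: the driver submits a vCenter task, a looping call periodically re-reads the TaskInfo, logs the progress percentage, and finally either records "completed successfully" with a duration_secs or raises the exception translated from the task fault, which is the same path that produced the ManagedObjectNotFoundException traceback earlier in this log. The sketch below is a minimal, self-contained illustration of that loop, assuming a pyVmomi-style task object with info.key, info.state, info.progress and info.error fields; the helper names (wait_for_task, TaskFailed) and the poll interval are illustrative assumptions, not the oslo.vmware implementation.

# Illustrative sketch (not oslo.vmware itself) of the polling loop behind the
# "Task: {...} progress is N%" records in this log.
# Assumes a pyVmomi-style task object; TaskFailed/wait_for_task are made-up names.
import time


class TaskFailed(Exception):
    """Raised when vCenter reports the task in the 'error' state."""


def wait_for_task(task, poll_interval=0.5):
    """Poll a vCenter task until it finishes; return its result or raise."""
    while True:
        info = task.info
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            # oslo.vmware raises a typed exception translated from the fault here
            # (e.g. ManagedObjectNotFoundException in the traceback above).
            raise TaskFailed(getattr(info.error, 'localizedMessage', info.error))
        # 'queued' or 'running': report the progress percentage and poll again.
        print("Task %s progress is %s%%" % (info.key, info.progress or 0))
        time.sleep(poll_interval)

In the log the equivalent loop runs inside an eventlet green thread (see the loopingcall and hub.switch frames in the traceback), so a task fault surfaces in the waiting caller as a raised exception rather than a return value.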
[ 824.916470] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9896cfda-66f0-4f6c-91ea-5e872e9d220b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.942881] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] de68a921-bf67-4794-923d-4e062d8ff802/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 824.943239] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b353149-3c86-4af4-bd12-ca29e6edabf4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.962203] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 824.962203] env[63538]: value = "task-5100995" [ 824.962203] env[63538]: _type = "Task" [ 824.962203] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.971270] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100995, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.132718] env[63538]: DEBUG nova.scheduler.client.report [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 825.211057] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.211326] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.211442] env[63538]: INFO nova.compute.manager [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Attaching volume 4ef803cc-0326-403f-933d-3af3fce6d68c to /dev/sdb [ 825.218104] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 825.218104] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52301634-95e2-00f5-4400-12dc936b61e7" [ 825.218104] env[63538]: _type = "HttpNfcLease" [ 825.218104] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 825.220983] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 825.220983] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52301634-95e2-00f5-4400-12dc936b61e7" [ 825.220983] env[63538]: _type = "HttpNfcLease" [ 825.220983] env[63538]: }. 
{{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 825.222099] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991f4f85-933d-4fbd-b68d-c480c36f74b3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.231938] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc1126-e425-3aaf-9cce-cc2bae89505a/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 825.232153] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc1126-e425-3aaf-9cce-cc2bae89505a/disk-0.vmdk. {{(pid=63538) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 825.288016] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee67401-7b0d-40a0-b49b-9c45115deb30 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.298214] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-09acc475-aa3d-43c3-be2e-713318f9fce5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.300806] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a59fc4-86a8-45a0-a96b-90ea98cf585e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.315761] env[63538]: DEBUG nova.virt.block_device [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Updating existing volume attachment record: 26e3fd38-7c89-4cf8-8a43-4440eca99b8e {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 825.473027] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100995, 'name': ReconfigVM_Task, 'duration_secs': 0.302491} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.473439] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Reconfigured VM instance instance-0000003a to attach disk [datastore1] de68a921-bf67-4794-923d-4e062d8ff802/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 825.474523] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01bfe3ab-622a-457a-8f2d-4004b23c30f6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.507163] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e3e0944-e8f7-476c-8a59-3c9a9027e96b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.527649] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 825.527649] env[63538]: value = "task-5100997" [ 825.527649] env[63538]: _type = "Task" [ 825.527649] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.538016] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100997, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.641274] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.137s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.645783] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 43.987s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.646230] env[63538]: DEBUG nova.objects.instance [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63538) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 825.669270] env[63538]: INFO nova.scheduler.client.report [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted allocations for instance 707a79e2-f5db-479c-b719-1e040935cda3 [ 826.040850] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5100997, 'name': ReconfigVM_Task, 'duration_secs': 0.191001} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.041212] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 826.041486] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9fa7ba84-b307-4b2b-b7b0-89d2f7bdc335 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.049760] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 826.049760] env[63538]: value = "task-5101000" [ 826.049760] env[63538]: _type = "Task" [ 826.049760] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.060860] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101000, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.179453] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a578f839-f4bf-4351-8d0f-da23955728be tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "707a79e2-f5db-479c-b719-1e040935cda3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.164s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.497989] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Completed reading data from the image iterator. {{(pid=63538) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 826.498479] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc1126-e425-3aaf-9cce-cc2bae89505a/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 826.499363] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbde3498-e18c-4951-9c1d-e0cd48cf9c9c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.508176] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc1126-e425-3aaf-9cce-cc2bae89505a/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 826.508419] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc1126-e425-3aaf-9cce-cc2bae89505a/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 826.508689] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-7f2a6a1c-c93f-4c23-af7b-1c4bb5187324 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.562374] env[63538]: DEBUG oslo_vmware.api [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101000, 'name': PowerOnVM_Task, 'duration_secs': 0.456748} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.562683] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 826.565746] env[63538]: DEBUG nova.compute.manager [None req-4d8b1b82-8b24-4708-9e4c-058107f88bba tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 826.566616] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca24803a-d31d-41b1-892a-7839c7eb92fb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.659785] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff644a12-c8dc-41da-b8aa-c24b34c6ad2b tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.659785] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.659s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.659785] env[63538]: DEBUG nova.objects.instance [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lazy-loading 'resources' on Instance uuid e50e95c0-830b-4d71-999b-546b138bf8f4 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 826.720329] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc1126-e425-3aaf-9cce-cc2bae89505a/disk-0.vmdk. 
{{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 826.720594] env[63538]: INFO nova.virt.vmwareapi.images [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Downloaded image file data 385b766b-e27c-4c97-87a2-473b5485f688 [ 826.721453] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c563038-f9a2-4e37-996b-3c29f5f1f0e3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.743193] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0725ca40-5292-4e6c-a929-4f29ef9bdb1f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.771438] env[63538]: INFO nova.virt.vmwareapi.images [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] The imported VM was unregistered [ 826.774055] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Caching image {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 826.774360] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Creating directory with path [datastore1] devstack-image-cache_base/385b766b-e27c-4c97-87a2-473b5485f688 {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 826.774658] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22253721-a719-4552-935b-b33529867e2d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.798528] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Created directory with path [datastore1] devstack-image-cache_base/385b766b-e27c-4c97-87a2-473b5485f688 {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 826.798743] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_b4585d7d-24f5-4c37-a020-5c07dcb72974/OSTACK_IMG_b4585d7d-24f5-4c37-a020-5c07dcb72974.vmdk to [datastore1] devstack-image-cache_base/385b766b-e27c-4c97-87a2-473b5485f688/385b766b-e27c-4c97-87a2-473b5485f688.vmdk. 
{{(pid=63538) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 826.799017] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-603bc821-2933-4142-bee5-479b7491b6d3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.806450] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 826.806450] env[63538]: value = "task-5101002" [ 826.806450] env[63538]: _type = "Task" [ 826.806450] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.815134] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101002, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.317240] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101002, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.593108] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-407defe0-389d-4eea-8ad8-f865ad78f5e5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.602922] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f40daf-ef74-43c1-9add-19c92b4c1a0d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.641030] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6630c7-9170-4922-b42a-630b71c899ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.650151] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0804f45e-6b46-4de1-9290-2b5a0e135523 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.667283] env[63538]: DEBUG nova.compute.provider_tree [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.820054] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101002, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.171291] env[63538]: DEBUG nova.scheduler.client.report [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 828.325234] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101002, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.680917] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.021s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.685818] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.007s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.686308] env[63538]: DEBUG nova.objects.instance [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lazy-loading 'resources' on Instance uuid 4e89aa25-fb4a-430d-ab87-feff57b73780 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 828.743833] env[63538]: INFO nova.scheduler.client.report [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleted allocations for instance e50e95c0-830b-4d71-999b-546b138bf8f4 [ 828.824945] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101002, 'name': MoveVirtualDisk_Task} progress is 60%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.253797] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d75788c3-ff8b-49ee-a75c-b482c8fd1024 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "e50e95c0-830b-4d71-999b-546b138bf8f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.307s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.321955] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101002, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.386267] env[63538]: INFO nova.compute.manager [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Rescuing [ 829.386561] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "refresh_cache-87f8bb3e-6f32-4850-ac54-efad0befb268" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.386721] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "refresh_cache-87f8bb3e-6f32-4850-ac54-efad0befb268" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.387053] env[63538]: DEBUG nova.network.neutron [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 829.675921] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b392b0-67b7-4acd-87a6-c122dfd70c80 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.684992] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc91791-d211-4376-8a03-e84beba144f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.719747] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aac8c74-0c0a-402c-9f7e-ab1261fa2aa0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.729572] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175a6dc7-70e9-472a-80fe-25b48abc6f74 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.746421] env[63538]: DEBUG nova.compute.provider_tree [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 
tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.822478] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101002, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.213695] env[63538]: DEBUG nova.network.neutron [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Updating instance_info_cache with network_info: [{"id": "6edf1839-d708-46ad-836d-e2ecac08730f", "address": "fa:16:3e:0c:3b:b0", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6edf1839-d7", "ovs_interfaceid": "6edf1839-d708-46ad-836d-e2ecac08730f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.248785] env[63538]: DEBUG nova.scheduler.client.report [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 830.326333] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101002, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.324811} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.326746] env[63538]: INFO nova.virt.vmwareapi.ds_util [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_b4585d7d-24f5-4c37-a020-5c07dcb72974/OSTACK_IMG_b4585d7d-24f5-4c37-a020-5c07dcb72974.vmdk to [datastore1] devstack-image-cache_base/385b766b-e27c-4c97-87a2-473b5485f688/385b766b-e27c-4c97-87a2-473b5485f688.vmdk. [ 830.327063] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Cleaning up location [datastore1] OSTACK_IMG_b4585d7d-24f5-4c37-a020-5c07dcb72974 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 830.327345] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_b4585d7d-24f5-4c37-a020-5c07dcb72974 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 830.327714] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95235f28-556d-4e83-ad8b-53f4d1771277 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.337102] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 830.337102] env[63538]: value = "task-5101004" [ 830.337102] env[63538]: _type = "Task" [ 830.337102] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.349287] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101004, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.396176] env[63538]: DEBUG oslo_concurrency.lockutils [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "bd222761-92aa-4f2c-a752-ead9c498ee7a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.396363] env[63538]: DEBUG oslo_concurrency.lockutils [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "bd222761-92aa-4f2c-a752-ead9c498ee7a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.396654] env[63538]: DEBUG oslo_concurrency.lockutils [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "bd222761-92aa-4f2c-a752-ead9c498ee7a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.396868] env[63538]: DEBUG oslo_concurrency.lockutils [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "bd222761-92aa-4f2c-a752-ead9c498ee7a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.397061] env[63538]: DEBUG oslo_concurrency.lockutils [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "bd222761-92aa-4f2c-a752-ead9c498ee7a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.399404] env[63538]: INFO nova.compute.manager [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Terminating instance [ 830.401733] env[63538]: DEBUG nova.compute.manager [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 830.401733] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 830.402384] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adbdbf4-3e68-4eb0-9875-24ac3ca77706 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.410786] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 830.411058] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b09250d8-4531-41f4-a061-66f255ebe20d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.418088] env[63538]: DEBUG oslo_vmware.api [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 830.418088] env[63538]: value = "task-5101005" [ 830.418088] env[63538]: _type = "Task" [ 830.418088] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.428640] env[63538]: DEBUG oslo_vmware.api [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101005, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.723086] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "refresh_cache-87f8bb3e-6f32-4850-ac54-efad0befb268" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.753779] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.069s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.756329] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 40.401s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.756565] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.756746] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63538) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 830.757077] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.072s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.757326] env[63538]: DEBUG nova.objects.instance [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Lazy-loading 'resources' on Instance uuid 736b110e-7265-42cc-9c9b-35f57c466b0c {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 830.759751] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e18927-65b2-4e1a-b198-0cf4202fc250 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.769434] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14a1fb4-7433-4eb5-a03c-a8428980285a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.786235] env[63538]: INFO nova.scheduler.client.report [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Deleted allocations for instance 4e89aa25-fb4a-430d-ab87-feff57b73780 
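Most of the records above and below follow one pattern: a vCenter task is created (MoveVirtualDisk_Task, DeleteDatastoreFile_Task, PowerOffVM_Task, CopyVirtualDisk_Task, ...), and oslo.vmware polls its state until it reports success, emitting the "Waiting for the task ... to complete", "progress is N%" and "completed successfully" lines along the way. The following is only a rough, hypothetical Python sketch of that poll-until-done pattern, not the library's actual code; fetch_task_info is a placeholder for the PropertyCollector read of task.info, and the real driver drives the loop with an oslo.service FixedIntervalLoopingCall rather than a bare sleep loop.

import time

# Hypothetical stand-in -- NOT the oslo.vmware API. In the real driver the
# task state comes from the vSphere PropertyCollector (the repeated
# RetrievePropertiesEx invocations visible in the log).
def fetch_task_info(task_ref):
    """Placeholder: return a dict like {'state': 'running', 'progress': 43}."""
    raise NotImplementedError

def wait_for_task(task_ref, poll_interval=0.5):
    """Minimal sketch of the polling behind the 'Task: {...} progress is N%'
    lines: re-read task state at a fixed interval, stop on success or error."""
    while True:
        info = fetch_task_info(task_ref)  # e.g. state of a MoveVirtualDisk_Task
        if info['state'] == 'success':
            return info                   # logged as 'completed successfully'
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print(f"Task {task_ref}: progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)         # stand-in for the looping call interval

The task durations reported in the log (e.g. 'duration_secs': 3.324811 for the disk move) are simply the elapsed time between task creation and the poll iteration that first observes the success state.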
[ 830.787847] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f98b49f-f916-4fdd-acd9-ab091057d5e5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.797826] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55857a55-5f2d-4275-a24c-6bffb0530729 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.828261] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178470MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=63538) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 830.828447] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.847505] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101004, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219578} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.848265] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 830.848265] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Releasing lock "[datastore1] devstack-image-cache_base/385b766b-e27c-4c97-87a2-473b5485f688/385b766b-e27c-4c97-87a2-473b5485f688.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.848631] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/385b766b-e27c-4c97-87a2-473b5485f688/385b766b-e27c-4c97-87a2-473b5485f688.vmdk to [datastore1] f9fa5578-acf3-416f-9cb0-8ceb00e5132d/f9fa5578-acf3-416f-9cb0-8ceb00e5132d.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 830.848993] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1ee20bf-4666-4604-912e-804bf07d9cfb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.858823] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 830.858823] env[63538]: value = "task-5101006" [ 830.858823] env[63538]: _type = "Task" [ 830.858823] 
env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.869031] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101006, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.931537] env[63538]: DEBUG oslo_vmware.api [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101005, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.254813] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 831.255111] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67b3b0ca-51ee-4faa-8dc0-d2bba878f71e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.264603] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 831.264603] env[63538]: value = "task-5101007" [ 831.264603] env[63538]: _type = "Task" [ 831.264603] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.276528] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101007, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.297971] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1b40ea-93dc-4c1d-a46e-fc7353176092 tempest-ImagesOneServerTestJSON-1476191925 tempest-ImagesOneServerTestJSON-1476191925-project-member] Lock "4e89aa25-fb4a-430d-ab87-feff57b73780" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.625s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.372913] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101006, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.430357] env[63538]: DEBUG oslo_vmware.api [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101005, 'name': PowerOffVM_Task, 'duration_secs': 0.565622} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.435200] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 831.435449] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 831.436025] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bb8957d-506c-4a60-8b26-37a05d355847 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.747887] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ffa692-91f6-459c-a832-d699fd73c0b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.756792] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85fd4f20-13c0-49e3-958e-09611d6d5dff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.796792] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c24af9a6-08e8-4c6a-a95f-1387d4a0c9ae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.808973] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a6a9c9-e846-4a16-9f86-71e6a4f00891 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.812926] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101007, 'name': PowerOffVM_Task, 'duration_secs': 0.239678} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.813265] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 831.814568] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76a91b2-69d0-4213-8411-8f28facf9586 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.827247] env[63538]: DEBUG nova.compute.provider_tree [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.849781] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6d577f-f3ca-4f76-8a58-99fef278a28d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.871370] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101006, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.888972] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 831.889963] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98202333-7897-4ada-a675-978aca8a2ae3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.898337] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 831.898337] env[63538]: value = "task-5101009" [ 831.898337] env[63538]: _type = "Task" [ 831.898337] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.908247] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101009, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.304752] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 832.305033] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 832.305826] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleting the datastore file [datastore2] bd222761-92aa-4f2c-a752-ead9c498ee7a {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 832.306365] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bccf6c06-aeb1-4e79-ba92-2a2753eedc53 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.316243] env[63538]: DEBUG oslo_vmware.api [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 832.316243] env[63538]: value = "task-5101010" [ 832.316243] env[63538]: _type = "Task" [ 832.316243] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.333017] env[63538]: DEBUG nova.scheduler.client.report [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 832.341563] env[63538]: DEBUG oslo_vmware.api [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101010, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.373730] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101006, 'name': CopyVirtualDisk_Task} progress is 60%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.375220] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Volume attach. Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 832.375559] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992397', 'volume_id': '4ef803cc-0326-403f-933d-3af3fce6d68c', 'name': 'volume-4ef803cc-0326-403f-933d-3af3fce6d68c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1', 'attached_at': '', 'detached_at': '', 'volume_id': '4ef803cc-0326-403f-933d-3af3fce6d68c', 'serial': '4ef803cc-0326-403f-933d-3af3fce6d68c'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 832.376684] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9f96f6-5ca6-46db-ae95-dbcfc11e0a81 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.397456] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726d05c1-7049-4caf-a903-da825329f95f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.429949] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] volume-4ef803cc-0326-403f-933d-3af3fce6d68c/volume-4ef803cc-0326-403f-933d-3af3fce6d68c.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 832.434144] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33b0559f-26aa-482b-be85-de9f16071756 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.449526] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 832.449812] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.450089] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4cb84221-7886-432b-9e52-c61c2efda751 
tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.450246] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.450432] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.451277] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbda171c-1fad-4136-969d-471376b9035b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.457488] env[63538]: DEBUG oslo_vmware.api [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 832.457488] env[63538]: value = "task-5101011" [ 832.457488] env[63538]: _type = "Task" [ 832.457488] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.468189] env[63538]: DEBUG oslo_vmware.api [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101011, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.470985] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.471523] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 832.472379] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3380fef3-8f46-4a27-87b7-c3cdcb9907da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.482022] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 832.482022] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a956fa-0a5b-fb60-55e8-796b2bc63560" [ 832.482022] env[63538]: _type = "Task" [ 832.482022] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.493380] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a956fa-0a5b-fb60-55e8-796b2bc63560, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.832744] env[63538]: DEBUG oslo_vmware.api [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101010, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.846671] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.087s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.848017] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.075s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.850443] env[63538]: INFO nova.compute.claims [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.879632] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101006, 'name': CopyVirtualDisk_Task} progress is 80%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.880474] env[63538]: INFO nova.scheduler.client.report [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Deleted allocations for instance 736b110e-7265-42cc-9c9b-35f57c466b0c [ 832.971713] env[63538]: DEBUG oslo_vmware.api [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101011, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.992803] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a956fa-0a5b-fb60-55e8-796b2bc63560, 'name': SearchDatastore_Task, 'duration_secs': 0.102192} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.994676] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd7c9462-f459-4c80-95b3-817b1ddf75c3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.001473] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 833.001473] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5248691d-b30e-e791-78e5-bec8247de45e" [ 833.001473] env[63538]: _type = "Task" [ 833.001473] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.012628] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5248691d-b30e-e791-78e5-bec8247de45e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.333107] env[63538]: DEBUG oslo_vmware.api [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101010, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.379566] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101006, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.395204] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50e23d06-c17c-4b82-9304-a965125a77c3 tempest-ServersTestFqdnHostnames-414457296 tempest-ServersTestFqdnHostnames-414457296-project-member] Lock "736b110e-7265-42cc-9c9b-35f57c466b0c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.224s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.471272] env[63538]: DEBUG oslo_vmware.api [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101011, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.511749] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5248691d-b30e-e791-78e5-bec8247de45e, 'name': SearchDatastore_Task, 'duration_secs': 0.098116} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.512029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.512307] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 87f8bb3e-6f32-4850-ac54-efad0befb268/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk. {{(pid=63538) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 833.512583] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5c92348-192e-4bb5-98d2-653c8bef09b8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.519483] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 833.519483] env[63538]: value = "task-5101012" [ 833.519483] env[63538]: _type = "Task" [ 833.519483] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.528284] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101012, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.832831] env[63538]: DEBUG oslo_vmware.api [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.090139} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.833177] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 833.833342] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 833.833531] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 833.833711] env[63538]: INFO nova.compute.manager [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Took 3.43 seconds to destroy the instance on the hypervisor. [ 833.833972] env[63538]: DEBUG oslo.service.loopingcall [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 833.834216] env[63538]: DEBUG nova.compute.manager [-] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 833.834615] env[63538]: DEBUG nova.network.neutron [-] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 833.880721] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101006, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.523679} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.880721] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/385b766b-e27c-4c97-87a2-473b5485f688/385b766b-e27c-4c97-87a2-473b5485f688.vmdk to [datastore1] f9fa5578-acf3-416f-9cb0-8ceb00e5132d/f9fa5578-acf3-416f-9cb0-8ceb00e5132d.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 833.881928] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dfb44a7-2e3e-4611-8a24-76a8d8491726 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.909422] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] f9fa5578-acf3-416f-9cb0-8ceb00e5132d/f9fa5578-acf3-416f-9cb0-8ceb00e5132d.vmdk or device None with type streamOptimized {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 833.912919] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13d9c868-a522-4b08-b498-c7ccbce85bea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.934926] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 833.934926] env[63538]: value = "task-5101013" [ 833.934926] env[63538]: _type = "Task" [ 833.934926] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.946038] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101013, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.972380] env[63538]: DEBUG oslo_vmware.api [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101011, 'name': ReconfigVM_Task, 'duration_secs': 1.338559} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.975326] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Reconfigured VM instance instance-0000003b to attach disk [datastore1] volume-4ef803cc-0326-403f-933d-3af3fce6d68c/volume-4ef803cc-0326-403f-933d-3af3fce6d68c.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 833.981033] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11159a72-348c-4f55-b035-b7fdd6a43260 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.998761] env[63538]: DEBUG oslo_vmware.api [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 833.998761] env[63538]: value = "task-5101014" [ 833.998761] env[63538]: _type = "Task" [ 833.998761] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.012871] env[63538]: DEBUG oslo_vmware.api [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101014, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.032034] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101012, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.403419] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5503332d-d8c4-42c1-822a-844a8268f5d8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.413295] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68d1ae4-8607-472c-8412-eebadae6f6be {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.454547] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70f9e20-1b86-42e4-a1f1-3523eb392853 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.469997] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c869f061-d17c-4628-8ab7-9c4371491c00 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.475265] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101013, 'name': ReconfigVM_Task, 'duration_secs': 0.47841} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.475265] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Reconfigured VM instance instance-0000000b to attach disk [datastore1] f9fa5578-acf3-416f-9cb0-8ceb00e5132d/f9fa5578-acf3-416f-9cb0-8ceb00e5132d.vmdk or device None with type streamOptimized {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.477227] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e71f491c-79a5-4ac2-ba03-289846efe4ac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.488638] env[63538]: DEBUG nova.compute.provider_tree [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.492069] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 834.492069] env[63538]: value = "task-5101015" [ 834.492069] env[63538]: _type = "Task" [ 834.492069] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.506112] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101015, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.517900] env[63538]: DEBUG oslo_vmware.api [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101014, 'name': ReconfigVM_Task, 'duration_secs': 0.213776} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.517900] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992397', 'volume_id': '4ef803cc-0326-403f-933d-3af3fce6d68c', 'name': 'volume-4ef803cc-0326-403f-933d-3af3fce6d68c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1', 'attached_at': '', 'detached_at': '', 'volume_id': '4ef803cc-0326-403f-933d-3af3fce6d68c', 'serial': '4ef803cc-0326-403f-933d-3af3fce6d68c'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 834.530456] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101012, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.596925} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.530744] env[63538]: INFO nova.virt.vmwareapi.ds_util [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 87f8bb3e-6f32-4850-ac54-efad0befb268/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk. 
[ 834.531670] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04004c40-c687-4d3d-98e5-87a1e75c3002 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.557996] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 87f8bb3e-6f32-4850-ac54-efad0befb268/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 834.558669] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-643f4c74-9e27-4248-9d89-30efd86952ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.575389] env[63538]: DEBUG nova.compute.manager [req-2898619e-4ec8-4ec4-acd2-af2900a49104 req-5314da32-feb4-431e-ad5c-cadce6c134e3 service nova] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Received event network-vif-deleted-2b33d2bc-399c-4a17-826e-f6425766c6fd {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 834.575613] env[63538]: INFO nova.compute.manager [req-2898619e-4ec8-4ec4-acd2-af2900a49104 req-5314da32-feb4-431e-ad5c-cadce6c134e3 service nova] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Neutron deleted interface 2b33d2bc-399c-4a17-826e-f6425766c6fd; detaching it from the instance and deleting it from the info cache [ 834.575828] env[63538]: DEBUG nova.network.neutron [req-2898619e-4ec8-4ec4-acd2-af2900a49104 req-5314da32-feb4-431e-ad5c-cadce6c134e3 service nova] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.583743] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 834.583743] env[63538]: value = "task-5101016" [ 834.583743] env[63538]: _type = "Task" [ 834.583743] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.593118] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101016, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.993265] env[63538]: DEBUG nova.scheduler.client.report [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 835.003027] env[63538]: DEBUG nova.network.neutron [-] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.017023] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101015, 'name': Rename_Task, 'duration_secs': 0.157704} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.017023] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 835.017023] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-474aed17-6843-4bfb-bd8c-1c62337171ba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.024622] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 835.024622] env[63538]: value = "task-5101017" [ 835.024622] env[63538]: _type = "Task" [ 835.024622] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.035743] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101017, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.078406] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e84b728-85f7-4d37-ac77-6796482b5848 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.094544] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1053a6e4-9a38-4034-a0a7-eaa8875dcf3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.112445] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101016, 'name': ReconfigVM_Task, 'duration_secs': 0.33156} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.113235] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 87f8bb3e-6f32-4850-ac54-efad0befb268/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 835.114219] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11037fe-e8bc-4e86-ac07-6bd224aa6718 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.160102] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f73baf20-e579-45e0-a16c-e933f22e5613 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.170807] env[63538]: DEBUG nova.compute.manager [req-2898619e-4ec8-4ec4-acd2-af2900a49104 req-5314da32-feb4-431e-ad5c-cadce6c134e3 service nova] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Detach interface failed, port_id=2b33d2bc-399c-4a17-826e-f6425766c6fd, reason: Instance bd222761-92aa-4f2c-a752-ead9c498ee7a could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 835.177206] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 835.177206] env[63538]: value = "task-5101018" [ 835.177206] env[63538]: _type = "Task" [ 835.177206] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.190208] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101018, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.511218] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.663s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.511789] env[63538]: DEBUG nova.compute.manager [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 835.515147] env[63538]: INFO nova.compute.manager [-] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Took 1.68 seconds to deallocate network for instance. [ 835.515633] env[63538]: DEBUG oslo_concurrency.lockutils [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.186s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.515900] env[63538]: DEBUG nova.objects.instance [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lazy-loading 'resources' on Instance uuid b5593b74-fe89-43f5-a8c6-e73159b4efac {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 835.544557] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101017, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.580266] env[63538]: DEBUG nova.objects.instance [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lazy-loading 'flavor' on Instance uuid 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 835.695902] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101018, 'name': ReconfigVM_Task, 'duration_secs': 0.16389} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.697156] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 835.697156] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d17808d-815e-4f44-b1f4-f4b721e78e6b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.707083] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 835.707083] env[63538]: value = "task-5101019" [ 835.707083] env[63538]: _type = "Task" [ 835.707083] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.718425] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101019, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.809894] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.024459] env[63538]: DEBUG nova.compute.utils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 836.024459] env[63538]: DEBUG nova.compute.manager [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 836.024459] env[63538]: DEBUG nova.network.neutron [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 836.037107] env[63538]: DEBUG oslo_concurrency.lockutils [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.047912] env[63538]: DEBUG oslo_vmware.api [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101017, 'name': PowerOnVM_Task, 'duration_secs': 0.807147} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.047912] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 836.085993] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e86f5c32-5bbb-46e5-abb6-5a0a72a76d0f tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 10.875s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.087220] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.278s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.087614] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.088137] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.088479] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 
tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.094116] env[63538]: INFO nova.compute.manager [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Terminating instance [ 836.098022] env[63538]: DEBUG nova.compute.manager [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 836.098022] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 836.098022] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47a1c45a-9331-490f-bc8f-e71eadd4a0c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.116699] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 836.116699] env[63538]: value = "task-5101020" [ 836.116699] env[63538]: _type = "Task" [ 836.116699] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.133772] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101020, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.189979] env[63538]: DEBUG nova.compute.manager [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 836.189979] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd52842-17e3-48f3-b61b-2fdfd029e83b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.195390] env[63538]: DEBUG nova.policy [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ebe77c0e32ce4a32b290ba5088e107f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7063c42297c24f2baf7271fa25dec927', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 836.223039] env[63538]: DEBUG oslo_vmware.api [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101019, 'name': PowerOnVM_Task, 'duration_secs': 0.442674} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.223039] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 836.229079] env[63538]: DEBUG nova.compute.manager [None req-4cb84221-7886-432b-9e52-c61c2efda751 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 836.230021] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207f4bc0-9ffd-4a69-b6f0-cf4187c96cf5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.532445] env[63538]: DEBUG nova.compute.manager [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 836.598618] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2ef0aa-b4bf-4062-805a-18f476a5dc2d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.616993] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a270f4-3ac8-429e-a67f-1e313e0e83d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.632468] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101020, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.667169] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a2a4e2-e4b6-4549-8d74-01c35af9293c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.677266] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818692ef-d76b-4861-b03d-e0402afbbc92 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.695608] env[63538]: DEBUG nova.compute.provider_tree [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.721701] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a3a60006-8139-4ace-ac19-dd1ae40e99fd tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 64.761s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.884798] env[63538]: DEBUG nova.network.neutron [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Successfully created port: 696fc25d-fa83-4793-bffa-6bd2ce56f489 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.139084] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101020, 'name': PowerOffVM_Task, 'duration_secs': 0.528906} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.139530] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 837.139934] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Volume detach. Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 837.140358] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992397', 'volume_id': '4ef803cc-0326-403f-933d-3af3fce6d68c', 'name': 'volume-4ef803cc-0326-403f-933d-3af3fce6d68c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1', 'attached_at': '', 'detached_at': '', 'volume_id': '4ef803cc-0326-403f-933d-3af3fce6d68c', 'serial': '4ef803cc-0326-403f-933d-3af3fce6d68c'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 837.142105] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6a77f8-8c4c-41a0-a730-3cb1f15d00b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.174995] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc64f83-60d2-4c4e-968b-8c916418a018 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.184333] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24cb537-7f39-441e-beaf-1582c5d10c16 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.208338] env[63538]: DEBUG nova.scheduler.client.report [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 837.213171] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83b564f-51ca-43c1-a6e7-ba1f645cc952 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.237922] env[63538]: 
DEBUG nova.virt.vmwareapi.volumeops [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] The volume has not been displaced from its original location: [datastore1] volume-4ef803cc-0326-403f-933d-3af3fce6d68c/volume-4ef803cc-0326-403f-933d-3af3fce6d68c.vmdk. No consolidation needed. {{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 837.245563] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Reconfiguring VM instance instance-0000003b to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 837.246683] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eac4facd-c63f-46ff-93f6-bc34454d4fd6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.268557] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 837.268557] env[63538]: value = "task-5101021" [ 837.268557] env[63538]: _type = "Task" [ 837.268557] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.280128] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101021, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.546653] env[63538]: DEBUG nova.compute.manager [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 837.593737] env[63538]: DEBUG nova.virt.hardware [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 837.593737] env[63538]: DEBUG nova.virt.hardware [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 837.593737] env[63538]: DEBUG nova.virt.hardware [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.593948] env[63538]: DEBUG nova.virt.hardware [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 837.593993] env[63538]: DEBUG nova.virt.hardware [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.594143] env[63538]: DEBUG nova.virt.hardware [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 837.594400] env[63538]: DEBUG nova.virt.hardware [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 837.594563] env[63538]: DEBUG nova.virt.hardware [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 837.594733] 
env[63538]: DEBUG nova.virt.hardware [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 837.594939] env[63538]: DEBUG nova.virt.hardware [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 837.595098] env[63538]: DEBUG nova.virt.hardware [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 837.597876] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a52aa4a-9459-48d9-ae37-8cd8d39cef99 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.611523] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffbb3e1-638c-4c4e-9231-119db53a89aa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.721667] env[63538]: DEBUG oslo_concurrency.lockutils [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.205s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.726054] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.419s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.726346] env[63538]: DEBUG nova.objects.instance [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lazy-loading 'resources' on Instance uuid 46e2c1f4-edf7-45d6-ba77-c872005fcf1b {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 837.762694] env[63538]: INFO nova.scheduler.client.report [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Deleted allocations for instance b5593b74-fe89-43f5-a8c6-e73159b4efac [ 837.779530] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101021, 'name': ReconfigVM_Task, 'duration_secs': 0.354974} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.779842] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Reconfigured VM instance instance-0000003b to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 837.789547] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-955dd464-21ea-486c-aa8c-645c0c78ff29 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.809111] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 837.809111] env[63538]: value = "task-5101022" [ 837.809111] env[63538]: _type = "Task" [ 837.809111] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.821073] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101022, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.278440] env[63538]: DEBUG oslo_concurrency.lockutils [None req-937db9ed-b964-4e68-be70-ee19e34b69aa tempest-ListServerFiltersTestJSON-146663809 tempest-ListServerFiltersTestJSON-146663809-project-member] Lock "b5593b74-fe89-43f5-a8c6-e73159b4efac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.918s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.324087] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101022, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.403012] env[63538]: INFO nova.compute.manager [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Unrescuing [ 838.403012] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "refresh_cache-87f8bb3e-6f32-4850-ac54-efad0befb268" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.403012] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "refresh_cache-87f8bb3e-6f32-4850-ac54-efad0befb268" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.403012] env[63538]: DEBUG nova.network.neutron [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 838.719092] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6fa48ce-c04b-41f9-bf2f-36ddfcb7f124 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.728633] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d87c21d-36af-4edc-8427-33e45d9a249d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.774343] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fb0128-6f53-405b-845c-a0e790be5354 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.781841] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23f6caa-09e8-4049-a185-2424d1f45ff0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.801730] env[63538]: DEBUG nova.compute.provider_tree [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.823450] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101022, 'name': ReconfigVM_Task, 'duration_secs': 0.893947} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.823904] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992397', 'volume_id': '4ef803cc-0326-403f-933d-3af3fce6d68c', 'name': 'volume-4ef803cc-0326-403f-933d-3af3fce6d68c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1', 'attached_at': '', 'detached_at': '', 'volume_id': '4ef803cc-0326-403f-933d-3af3fce6d68c', 'serial': '4ef803cc-0326-403f-933d-3af3fce6d68c'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 838.824347] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 838.825713] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4578da02-183f-4b0c-a9fd-db598e2d39ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.834667] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 838.834983] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c896a774-33f4-4f82-acee-d959d000fdd4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.918640] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 838.919061] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 838.919310] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleting the datastore file [datastore2] 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 838.919694] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bfe28f3-15d8-49f9-88cc-78e97f18c87b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
838.931055] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 838.931055] env[63538]: value = "task-5101024" [ 838.931055] env[63538]: _type = "Task" [ 838.931055] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.941343] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101024, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.024409] env[63538]: DEBUG nova.network.neutron [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Successfully updated port: 696fc25d-fa83-4793-bffa-6bd2ce56f489 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.222168] env[63538]: DEBUG nova.compute.manager [req-dd025770-92e6-4048-ac8c-c70951aba685 req-ab04631a-5905-47ef-a593-41bcbae19deb service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Received event network-vif-plugged-696fc25d-fa83-4793-bffa-6bd2ce56f489 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 839.222440] env[63538]: DEBUG oslo_concurrency.lockutils [req-dd025770-92e6-4048-ac8c-c70951aba685 req-ab04631a-5905-47ef-a593-41bcbae19deb service nova] Acquiring lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.222660] env[63538]: DEBUG oslo_concurrency.lockutils [req-dd025770-92e6-4048-ac8c-c70951aba685 req-ab04631a-5905-47ef-a593-41bcbae19deb service nova] Lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.222831] env[63538]: DEBUG oslo_concurrency.lockutils [req-dd025770-92e6-4048-ac8c-c70951aba685 req-ab04631a-5905-47ef-a593-41bcbae19deb service nova] Lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.223017] env[63538]: DEBUG nova.compute.manager [req-dd025770-92e6-4048-ac8c-c70951aba685 req-ab04631a-5905-47ef-a593-41bcbae19deb service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] No waiting events found dispatching network-vif-plugged-696fc25d-fa83-4793-bffa-6bd2ce56f489 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 839.223192] env[63538]: WARNING nova.compute.manager [req-dd025770-92e6-4048-ac8c-c70951aba685 req-ab04631a-5905-47ef-a593-41bcbae19deb service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Received unexpected event network-vif-plugged-696fc25d-fa83-4793-bffa-6bd2ce56f489 for instance with vm_state building and task_state spawning. 
[ 839.224363] env[63538]: DEBUG nova.network.neutron [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Updating instance_info_cache with network_info: [{"id": "6edf1839-d708-46ad-836d-e2ecac08730f", "address": "fa:16:3e:0c:3b:b0", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6edf1839-d7", "ovs_interfaceid": "6edf1839-d708-46ad-836d-e2ecac08730f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.305725] env[63538]: DEBUG nova.scheduler.client.report [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 839.441853] env[63538]: DEBUG oslo_vmware.api [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101024, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289254} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.441853] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 839.441853] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 839.441853] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 839.442235] env[63538]: INFO nova.compute.manager [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Took 3.35 seconds to destroy the instance on the hypervisor. [ 839.442271] env[63538]: DEBUG oslo.service.loopingcall [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 839.442485] env[63538]: DEBUG nova.compute.manager [-] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 839.442613] env[63538]: DEBUG nova.network.neutron [-] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 839.535749] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.535749] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.535749] env[63538]: DEBUG nova.network.neutron [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 839.727788] env[63538]: DEBUG oslo_concurrency.lockutils [None 
req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "refresh_cache-87f8bb3e-6f32-4850-ac54-efad0befb268" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.728675] env[63538]: DEBUG nova.objects.instance [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lazy-loading 'flavor' on Instance uuid 87f8bb3e-6f32-4850-ac54-efad0befb268 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 839.811333] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.085s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.816385] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.183s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.816385] env[63538]: DEBUG nova.objects.instance [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lazy-loading 'resources' on Instance uuid e4b94aa7-7434-4a6e-b6d3-ed02315c435f {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 839.845469] env[63538]: INFO nova.scheduler.client.report [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Deleted allocations for instance 46e2c1f4-edf7-45d6-ba77-c872005fcf1b [ 840.021021] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquiring lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.022520] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.081566] env[63538]: DEBUG nova.network.neutron [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 840.157082] env[63538]: DEBUG nova.compute.manager [req-cf852f45-08f4-46b6-9045-ed75f3cc7063 req-4f1ba4d2-71ce-430e-a648-cdbc12530c8d service nova] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Received event network-vif-deleted-177a8c57-989c-48d3-bf05-40bead2e9b7f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 840.157372] env[63538]: INFO nova.compute.manager [req-cf852f45-08f4-46b6-9045-ed75f3cc7063 req-4f1ba4d2-71ce-430e-a648-cdbc12530c8d service nova] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Neutron deleted interface 177a8c57-989c-48d3-bf05-40bead2e9b7f; detaching it from the instance and deleting it from the info cache [ 840.157563] env[63538]: DEBUG nova.network.neutron [req-cf852f45-08f4-46b6-9045-ed75f3cc7063 req-4f1ba4d2-71ce-430e-a648-cdbc12530c8d service nova] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.234967] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13552916-546a-4ecd-b1c4-96093238e288 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.271787] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 840.272719] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33adaccf-5340-4d53-be8e-6b7751ed38e4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.281543] env[63538]: DEBUG oslo_vmware.api [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 840.281543] env[63538]: value = "task-5101025" [ 840.281543] env[63538]: _type = "Task" [ 840.281543] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.293615] env[63538]: DEBUG oslo_vmware.api [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101025, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.323272] env[63538]: DEBUG nova.network.neutron [-] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.342838] env[63538]: DEBUG nova.network.neutron [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Updating instance_info_cache with network_info: [{"id": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "address": "fa:16:3e:60:55:b3", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap696fc25d-fa", "ovs_interfaceid": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.354304] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e5e6599a-c477-4e82-9eba-08f1c955678c tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "46e2c1f4-edf7-45d6-ba77-c872005fcf1b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.086s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.524469] env[63538]: DEBUG nova.compute.manager [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 840.668028] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-045f4d3b-56fe-4e52-96d9-926ab8ff8421 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.682069] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9343666a-b09f-49e7-b3ba-1fa2bcd427f6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.724521] env[63538]: DEBUG nova.compute.manager [req-cf852f45-08f4-46b6-9045-ed75f3cc7063 req-4f1ba4d2-71ce-430e-a648-cdbc12530c8d service nova] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Detach interface failed, port_id=177a8c57-989c-48d3-bf05-40bead2e9b7f, reason: Instance 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 840.776914] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5bd211-59b6-4703-b46b-12b8151281cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.789338] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-82a52df4-ce63-4c02-9002-6aa30da793ca tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Suspending the VM {{(pid=63538) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 840.789625] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c977c9c2-be0e-49ab-b8c2-d35ba9e0877c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.797011] env[63538]: DEBUG oslo_vmware.api [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101025, 'name': PowerOffVM_Task, 'duration_secs': 0.300339} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.797857] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 840.803398] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Reconfiguring VM instance instance-00000039 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 840.803837] env[63538]: DEBUG oslo_vmware.api [None req-82a52df4-ce63-4c02-9002-6aa30da793ca tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 840.803837] env[63538]: value = "task-5101026" [ 840.803837] env[63538]: _type = "Task" [ 840.803837] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.809298] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7cf5156-9fb1-4f04-aa9f-67c7cd5d85be {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.830853] env[63538]: INFO nova.compute.manager [-] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Took 1.39 seconds to deallocate network for instance. [ 840.838920] env[63538]: DEBUG oslo_vmware.api [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 840.838920] env[63538]: value = "task-5101027" [ 840.838920] env[63538]: _type = "Task" [ 840.838920] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.840649] env[63538]: DEBUG oslo_vmware.api [None req-82a52df4-ce63-4c02-9002-6aa30da793ca tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101026, 'name': SuspendVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.848018] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.848018] env[63538]: DEBUG nova.compute.manager [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Instance network_info: |[{"id": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "address": "fa:16:3e:60:55:b3", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap696fc25d-fa", "ovs_interfaceid": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 840.848018] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:55:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '412cde91-d0f0-4193-b36b-d8b9d17384c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '696fc25d-fa83-4793-bffa-6bd2ce56f489', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 840.855613] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Creating folder: Project (7063c42297c24f2baf7271fa25dec927). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 840.857093] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45be1b89-5266-4ac8-a9d9-c0141bfe1075 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.868839] env[63538]: DEBUG oslo_vmware.api [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101027, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.880458] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Created folder: Project (7063c42297c24f2baf7271fa25dec927) in parent group-v992234. [ 840.880966] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Creating folder: Instances. Parent ref: group-v992398. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 840.881537] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb240400-f615-4993-94e7-7d18f0826bf6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.898258] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Created folder: Instances in parent group-v992398. [ 840.898515] env[63538]: DEBUG oslo.service.loopingcall [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.898804] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 840.899093] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a53fe7a2-8a13-43e8-8aeb-1d617931b510 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.922575] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4a606e-17f7-4436-9d44-a9f1cd4bc23a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.935257] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ce9762-3280-4276-89ca-acd0dca87f63 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.940273] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.940273] env[63538]: value = "task-5101030" [ 840.940273] env[63538]: _type = "Task" [ 840.940273] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.980801] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc0bb11-10ac-4272-a1c9-3ce2e310240d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.989780] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101030, 'name': CreateVM_Task} progress is 15%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.996963] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d3f2a0-c2e6-4d3d-b19b-1096a9cb5b29 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.015153] env[63538]: DEBUG nova.compute.provider_tree [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.060844] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.258391] env[63538]: DEBUG nova.compute.manager [req-7996a317-ccb4-443f-8bb4-fb378aefb03d req-5d89dd3c-bc4c-4ff9-9ac3-a83498819c0b service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Received event network-changed-696fc25d-fa83-4793-bffa-6bd2ce56f489 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 841.258803] env[63538]: DEBUG nova.compute.manager [req-7996a317-ccb4-443f-8bb4-fb378aefb03d req-5d89dd3c-bc4c-4ff9-9ac3-a83498819c0b service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Refreshing instance network info cache due to event network-changed-696fc25d-fa83-4793-bffa-6bd2ce56f489. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 841.261428] env[63538]: DEBUG oslo_concurrency.lockutils [req-7996a317-ccb4-443f-8bb4-fb378aefb03d req-5d89dd3c-bc4c-4ff9-9ac3-a83498819c0b service nova] Acquiring lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.261428] env[63538]: DEBUG oslo_concurrency.lockutils [req-7996a317-ccb4-443f-8bb4-fb378aefb03d req-5d89dd3c-bc4c-4ff9-9ac3-a83498819c0b service nova] Acquired lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.261428] env[63538]: DEBUG nova.network.neutron [req-7996a317-ccb4-443f-8bb4-fb378aefb03d req-5d89dd3c-bc4c-4ff9-9ac3-a83498819c0b service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Refreshing network info cache for port 696fc25d-fa83-4793-bffa-6bd2ce56f489 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 841.339842] env[63538]: DEBUG oslo_vmware.api [None req-82a52df4-ce63-4c02-9002-6aa30da793ca tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101026, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.351932] env[63538]: DEBUG oslo_vmware.api [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101027, 'name': ReconfigVM_Task, 'duration_secs': 0.33709} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.352897] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Reconfigured VM instance instance-00000039 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 841.353474] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 841.354019] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ab0c287-eb86-49c6-9023-eda3dfec50dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.363945] env[63538]: DEBUG oslo_vmware.api [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 841.363945] env[63538]: value = "task-5101031" [ 841.363945] env[63538]: _type = "Task" [ 841.363945] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.379239] env[63538]: DEBUG oslo_vmware.api [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101031, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.415740] env[63538]: INFO nova.compute.manager [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Took 0.58 seconds to detach 1 volumes for instance. [ 841.456294] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101030, 'name': CreateVM_Task, 'duration_secs': 0.392833} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.456505] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 841.458177] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.458177] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.458626] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 841.458941] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91615e53-5cb1-4988-8875-a11d3bfae1a4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.466043] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 841.466043] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5212f469-47c4-0e67-197e-803f67012cf9" [ 841.466043] env[63538]: _type = "Task" [ 841.466043] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.478694] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5212f469-47c4-0e67-197e-803f67012cf9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.518419] env[63538]: DEBUG nova.scheduler.client.report [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 841.754780] env[63538]: DEBUG oslo_concurrency.lockutils [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "d5d557c6-3d4e-4122-8756-218c9757fa01" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.755040] env[63538]: DEBUG oslo_concurrency.lockutils [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.836770] env[63538]: DEBUG oslo_vmware.api [None req-82a52df4-ce63-4c02-9002-6aa30da793ca tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101026, 'name': SuspendVM_Task, 'duration_secs': 0.740755} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.837126] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-82a52df4-ce63-4c02-9002-6aa30da793ca tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Suspended the VM {{(pid=63538) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 841.837374] env[63538]: DEBUG nova.compute.manager [None req-82a52df4-ce63-4c02-9002-6aa30da793ca tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 841.838121] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ed5d01-1f78-4c77-a6bc-31aa070a53e7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.877223] env[63538]: DEBUG oslo_vmware.api [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101031, 'name': PowerOnVM_Task, 'duration_secs': 0.395251} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.877425] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 841.877949] env[63538]: DEBUG nova.compute.manager [None req-a37a3e69-8a40-46e9-a344-5be3bd5de246 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 841.878968] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ad098e-77a2-455c-beae-d3cd4a893812 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.923454] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.979090] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5212f469-47c4-0e67-197e-803f67012cf9, 'name': SearchDatastore_Task, 'duration_secs': 0.014932} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.979423] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.979664] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.979909] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.980080] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.980271] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.980598] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c288cfa-3d11-4035-89c6-e54e61c94bb1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.992989] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.993246] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 841.994103] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-809c5f70-924b-40b7-a3da-c4f0fab6d429 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.000208] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 842.000208] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526842f0-ab2a-158f-c920-d75ff26a049a" [ 842.000208] env[63538]: _type = "Task" [ 842.000208] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.010085] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526842f0-ab2a-158f-c920-d75ff26a049a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.023808] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.210s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.026715] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.401s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.028259] env[63538]: INFO nova.compute.claims [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 842.065096] env[63538]: INFO nova.scheduler.client.report [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Deleted allocations for instance e4b94aa7-7434-4a6e-b6d3-ed02315c435f [ 842.216035] env[63538]: DEBUG nova.network.neutron [req-7996a317-ccb4-443f-8bb4-fb378aefb03d req-5d89dd3c-bc4c-4ff9-9ac3-a83498819c0b service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Updated VIF entry in instance network info cache for port 696fc25d-fa83-4793-bffa-6bd2ce56f489. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 842.216309] env[63538]: DEBUG nova.network.neutron [req-7996a317-ccb4-443f-8bb4-fb378aefb03d req-5d89dd3c-bc4c-4ff9-9ac3-a83498819c0b service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Updating instance_info_cache with network_info: [{"id": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "address": "fa:16:3e:60:55:b3", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap696fc25d-fa", "ovs_interfaceid": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.259230] env[63538]: DEBUG nova.compute.utils [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 842.513052] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526842f0-ab2a-158f-c920-d75ff26a049a, 'name': SearchDatastore_Task, 'duration_secs': 0.011135} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.514416] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6f72205-9c1f-41d9-8058-702f4aebc17a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.521017] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 842.521017] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c2cae5-23b4-cbc0-30fd-b91e080fac56" [ 842.521017] env[63538]: _type = "Task" [ 842.521017] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.531333] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c2cae5-23b4-cbc0-30fd-b91e080fac56, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.578755] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8115f90-9d84-4a94-85a6-77b6b088d1bb tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "e4b94aa7-7434-4a6e-b6d3-ed02315c435f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.431s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.722706] env[63538]: DEBUG oslo_concurrency.lockutils [req-7996a317-ccb4-443f-8bb4-fb378aefb03d req-5d89dd3c-bc4c-4ff9-9ac3-a83498819c0b service nova] Releasing lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.762950] env[63538]: DEBUG oslo_concurrency.lockutils [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.039145] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c2cae5-23b4-cbc0-30fd-b91e080fac56, 'name': SearchDatastore_Task, 'duration_secs': 0.011649} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.040420] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.043116] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] d967631f-5c8a-42d8-ac05-4cec3bdb55cf/d967631f-5c8a-42d8-ac05-4cec3bdb55cf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 843.043116] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79b71a49-ac9b-4b3e-850a-642871468687 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.054459] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 843.054459] env[63538]: value = "task-5101032" [ 843.054459] env[63538]: _type = "Task" [ 843.054459] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.063942] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101032, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.466662] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70efa60-ccb4-45db-b16b-0b4b74aea264 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.475475] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39707dd8-6492-4792-a74d-9ffa9f01560a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.514442] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e974202e-7ceb-44c6-a138-1a62ae5ffa6d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.522117] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e941955-314a-4c23-80e4-b720f91537a2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.537447] env[63538]: DEBUG nova.compute.provider_tree [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.569832] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101032, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.852586] env[63538]: DEBUG oslo_concurrency.lockutils [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "d5d557c6-3d4e-4122-8756-218c9757fa01" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.852586] env[63538]: DEBUG oslo_concurrency.lockutils [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.852586] env[63538]: INFO nova.compute.manager [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Attaching volume bd560018-1c74-4223-aa48-6dd904889923 to /dev/sdb [ 843.910527] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9bc3e93-f535-4aed-91a9-c82618b494fe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.917375] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12515699-1812-46f4-a354-d4022ebf0c57 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.933261] env[63538]: DEBUG nova.virt.block_device [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Updating existing volume attachment record: fefb22e2-d5e4-4981-9f5e-07323bb1277d {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 844.043226] env[63538]: DEBUG nova.scheduler.client.report [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 844.069831] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101032, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598059} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.069831] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] d967631f-5c8a-42d8-ac05-4cec3bdb55cf/d967631f-5c8a-42d8-ac05-4cec3bdb55cf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 844.069831] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 844.069831] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30a6b47c-cee2-4f8f-823b-00639d24665c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.078929] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 844.078929] env[63538]: value = "task-5101033" [ 844.078929] env[63538]: _type = "Task" [ 844.078929] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.093163] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101033, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.309831] env[63538]: INFO nova.compute.manager [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Resuming [ 844.310380] env[63538]: DEBUG nova.objects.instance [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lazy-loading 'flavor' on Instance uuid f9fa5578-acf3-416f-9cb0-8ceb00e5132d {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.551912] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.525s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.552408] env[63538]: DEBUG nova.compute.manager [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 844.557396] env[63538]: DEBUG oslo_concurrency.lockutils [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 47.258s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.593586] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101033, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08927} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.594322] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.595506] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c8ddd4-049a-4688-a24c-1f26632a8c0d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.624691] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] d967631f-5c8a-42d8-ac05-4cec3bdb55cf/d967631f-5c8a-42d8-ac05-4cec3bdb55cf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.625154] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bdb5aec-b1c2-41d7-84b5-3672188ecc7c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.651040] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 844.651040] env[63538]: value = "task-5101037" [ 844.651040] env[63538]: _type = "Task" [ 844.651040] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.661781] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101037, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.010197] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "1db1d558-2473-49cb-b309-f7192bd6b9c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.010329] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "1db1d558-2473-49cb-b309-f7192bd6b9c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.051117] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "fa8ed101-914d-4751-ab9b-f68ad5da7a56" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.051440] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "fa8ed101-914d-4751-ab9b-f68ad5da7a56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.063951] env[63538]: INFO nova.compute.claims [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 845.071018] env[63538]: DEBUG nova.compute.utils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 845.071018] env[63538]: DEBUG nova.compute.manager [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 845.071018] env[63538]: DEBUG nova.network.neutron [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 845.146102] env[63538]: DEBUG nova.policy [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '424949fbf660474c864efecda1180e97', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f7d0ccfeee04a17a6970f5dc7383f94', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 845.163213] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101037, 'name': ReconfigVM_Task, 'duration_secs': 0.32168} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.163628] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Reconfigured VM instance instance-0000003c to attach disk [datastore2] d967631f-5c8a-42d8-ac05-4cec3bdb55cf/d967631f-5c8a-42d8-ac05-4cec3bdb55cf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 845.164369] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb935b58-89b9-4970-a3f8-ce8253225b9b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.173016] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 845.173016] env[63538]: value = "task-5101038" [ 845.173016] env[63538]: _type = "Task" [ 845.173016] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.187470] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101038, 'name': Rename_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.320012] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.320224] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquired lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.320408] env[63538]: DEBUG nova.network.neutron [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 845.513582] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 845.574023] env[63538]: INFO nova.compute.resource_tracker [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating resource usage from migration 0c630b5a-3695-4f8a-95d5-b51ed38cf5ce [ 845.578180] env[63538]: DEBUG nova.compute.manager [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 845.685437] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101038, 'name': Rename_Task, 'duration_secs': 0.139464} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.685722] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 845.685969] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0aa2b43a-1907-4d54-af72-e6b1d356bcca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.695939] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 845.695939] env[63538]: value = "task-5101039" [ 845.695939] env[63538]: _type = "Task" [ 845.695939] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.709551] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101039, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.791115] env[63538]: DEBUG nova.network.neutron [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Successfully created port: 9fa95b30-4f3b-4051-94dc-c49ea22b7275 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 845.999225] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "0df15328-aebd-44c5-9c78-ee05f188ad95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.999466] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.036332] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.069422] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20e531b-1588-4ac1-a370-f3973c8da183 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.078799] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ccde2a4-3cac-473a-a86a-2f08ecd5fcf3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.123109] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add35474-3e45-42fe-abd3-236d7bde1552 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.137397] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3140fdc2-ed9a-4600-bcbd-065f4b602cfb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.154373] env[63538]: DEBUG nova.compute.provider_tree [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.173753] env[63538]: DEBUG nova.network.neutron [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Updating instance_info_cache with network_info: [{"id": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "address": "fa:16:3e:a9:56:1a", "network": {"id": "4fd42fb1-fcc5-467d-88b7-1a5ab3297631", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-9303923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c5e6ed681ed4078bd9115b30f419d9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5dfe48b-4a", "ovs_interfaceid": "a5dfe48b-4acc-472c-8e00-f936b4068ea5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.208451] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101039, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.364121] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.364400] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.590945] env[63538]: DEBUG nova.compute.manager [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 846.618973] env[63538]: DEBUG nova.virt.hardware [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 846.619379] env[63538]: DEBUG nova.virt.hardware [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 846.619453] env[63538]: DEBUG nova.virt.hardware [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.619641] env[63538]: DEBUG nova.virt.hardware [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 846.619823] env[63538]: DEBUG nova.virt.hardware [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a 
tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.619962] env[63538]: DEBUG nova.virt.hardware [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 846.620216] env[63538]: DEBUG nova.virt.hardware [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 846.620462] env[63538]: DEBUG nova.virt.hardware [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 846.620700] env[63538]: DEBUG nova.virt.hardware [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 846.620909] env[63538]: DEBUG nova.virt.hardware [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 846.621162] env[63538]: DEBUG nova.virt.hardware [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 846.622087] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca98bf59-ea36-405f-b158-879941faddde {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.630864] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649d871c-c433-4236-8032-59c5c8d4a469 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.657037] env[63538]: DEBUG nova.scheduler.client.report [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 846.676743] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Releasing lock "refresh_cache-f9fa5578-acf3-416f-9cb0-8ceb00e5132d" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.678053] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d28855-9d37-462c-97fc-24b08babd565 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.685219] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Resuming the VM {{(pid=63538) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1184}} [ 846.685503] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55ce1435-9d97-46d4-aa6a-9a9b63a64dea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.692589] env[63538]: DEBUG oslo_vmware.api [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 846.692589] env[63538]: value = "task-5101041" [ 846.692589] env[63538]: _type = "Task" [ 846.692589] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.702929] env[63538]: DEBUG oslo_vmware.api [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101041, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.708810] env[63538]: DEBUG oslo_vmware.api [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101039, 'name': PowerOnVM_Task, 'duration_secs': 0.536559} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.709095] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 846.709360] env[63538]: INFO nova.compute.manager [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Took 9.17 seconds to spawn the instance on the hypervisor. 
[ 846.709484] env[63538]: DEBUG nova.compute.manager [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 846.710371] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79caa79b-1ea4-4aa2-8c5d-d3f40db8e10d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.162438] env[63538]: DEBUG oslo_concurrency.lockutils [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.606s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.162822] env[63538]: INFO nova.compute.manager [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Migrating [ 847.170811] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.981s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.170811] env[63538]: DEBUG nova.objects.instance [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lazy-loading 'resources' on Instance uuid 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 847.207282] env[63538]: DEBUG oslo_vmware.api [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101041, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.230406] env[63538]: INFO nova.compute.manager [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Took 56.48 seconds to build instance. 
[ 847.645564] env[63538]: DEBUG nova.compute.manager [req-d1f623b0-a104-4fa9-b314-f04ecdf069a1 req-b017f052-ab64-4f87-aea7-aa0ac8edf9e2 service nova] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Received event network-vif-plugged-9fa95b30-4f3b-4051-94dc-c49ea22b7275 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 847.645873] env[63538]: DEBUG oslo_concurrency.lockutils [req-d1f623b0-a104-4fa9-b314-f04ecdf069a1 req-b017f052-ab64-4f87-aea7-aa0ac8edf9e2 service nova] Acquiring lock "0e718984-cfce-4620-9be6-fdcfb4954da8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.646018] env[63538]: DEBUG oslo_concurrency.lockutils [req-d1f623b0-a104-4fa9-b314-f04ecdf069a1 req-b017f052-ab64-4f87-aea7-aa0ac8edf9e2 service nova] Lock "0e718984-cfce-4620-9be6-fdcfb4954da8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.646195] env[63538]: DEBUG oslo_concurrency.lockutils [req-d1f623b0-a104-4fa9-b314-f04ecdf069a1 req-b017f052-ab64-4f87-aea7-aa0ac8edf9e2 service nova] Lock "0e718984-cfce-4620-9be6-fdcfb4954da8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.646367] env[63538]: DEBUG nova.compute.manager [req-d1f623b0-a104-4fa9-b314-f04ecdf069a1 req-b017f052-ab64-4f87-aea7-aa0ac8edf9e2 service nova] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] No waiting events found dispatching network-vif-plugged-9fa95b30-4f3b-4051-94dc-c49ea22b7275 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 847.647148] env[63538]: WARNING nova.compute.manager [req-d1f623b0-a104-4fa9-b314-f04ecdf069a1 req-b017f052-ab64-4f87-aea7-aa0ac8edf9e2 service nova] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Received unexpected event network-vif-plugged-9fa95b30-4f3b-4051-94dc-c49ea22b7275 for instance with vm_state building and task_state spawning. 
[ 847.682705] env[63538]: DEBUG oslo_concurrency.lockutils [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.682918] env[63538]: DEBUG oslo_concurrency.lockutils [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.684018] env[63538]: DEBUG nova.network.neutron [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 847.711253] env[63538]: DEBUG oslo_vmware.api [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101041, 'name': PowerOnVM_Task, 'duration_secs': 0.584126} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.714727] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Resumed the VM {{(pid=63538) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1189}} [ 847.714901] env[63538]: DEBUG nova.compute.manager [None req-c75879d1-11cc-49a6-876f-be3122b0c80a tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 847.716653] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f745d7-c199-4592-8c4e-bd80212dc413 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.733016] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e185473b-495d-4e86-981a-53af5fa66303 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.634s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.824432] env[63538]: DEBUG nova.network.neutron [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Successfully updated port: 9fa95b30-4f3b-4051-94dc-c49ea22b7275 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 848.139642] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466b3695-823f-4ff3-ac45-1f6fa6145015 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.146615] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9703dd9-39f6-412c-bd39-f6337ed88016 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.177540] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db3ca32-a45e-407d-bdfe-9e0eb72d127a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.185990] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6ad9e3-69e8-4504-9d15-693624854bed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.202788] env[63538]: DEBUG nova.compute.provider_tree [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.237144] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 848.327375] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Acquiring lock "refresh_cache-0e718984-cfce-4620-9be6-fdcfb4954da8" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.327542] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Acquired lock "refresh_cache-0e718984-cfce-4620-9be6-fdcfb4954da8" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.327664] env[63538]: DEBUG nova.network.neutron [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 848.475572] env[63538]: DEBUG nova.network.neutron [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance_info_cache with network_info: [{"id": "11d7dbc5-d269-456b-9a7a-601759e64b51", "address": "fa:16:3e:45:35:dd", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.81", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11d7dbc5-d2", "ovs_interfaceid": "11d7dbc5-d269-456b-9a7a-601759e64b51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.705933] env[63538]: DEBUG nova.scheduler.client.report [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 848.767155] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.876599] env[63538]: DEBUG nova.network.neutron [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.978488] env[63538]: DEBUG oslo_concurrency.lockutils [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.016894] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Volume attach. 
Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 849.017163] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992402', 'volume_id': 'bd560018-1c74-4223-aa48-6dd904889923', 'name': 'volume-bd560018-1c74-4223-aa48-6dd904889923', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd5d557c6-3d4e-4122-8756-218c9757fa01', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd560018-1c74-4223-aa48-6dd904889923', 'serial': 'bd560018-1c74-4223-aa48-6dd904889923'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 849.018139] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e10479e-5a47-46e3-8eb0-a487dde1d3ee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.038160] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23fed6dd-329d-462c-a554-7eff3d9e3828 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.072024] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] volume-bd560018-1c74-4223-aa48-6dd904889923/volume-bd560018-1c74-4223-aa48-6dd904889923.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 849.072024] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e62b8ba-f8cc-4c14-ba1a-5c65746b1510 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.089392] env[63538]: DEBUG oslo_vmware.api [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 849.089392] env[63538]: value = "task-5101042" [ 849.089392] env[63538]: _type = "Task" [ 849.089392] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.099186] env[63538]: DEBUG oslo_vmware.api [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101042, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.113362] env[63538]: DEBUG nova.network.neutron [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Updating instance_info_cache with network_info: [{"id": "9fa95b30-4f3b-4051-94dc-c49ea22b7275", "address": "fa:16:3e:0e:aa:c3", "network": {"id": "cdb9e991-6d54-4556-aee9-e21aa418c5a3", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-543465917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f7d0ccfeee04a17a6970f5dc7383f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fa95b30-4f", "ovs_interfaceid": "9fa95b30-4f3b-4051-94dc-c49ea22b7275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.210916] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.040s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.213642] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.291s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.215895] env[63538]: INFO nova.compute.claims [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 849.232775] env[63538]: INFO nova.scheduler.client.report [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Deleted allocations for instance 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b [ 849.600306] env[63538]: DEBUG oslo_vmware.api [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101042, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.616352] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Releasing lock "refresh_cache-0e718984-cfce-4620-9be6-fdcfb4954da8" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.616687] env[63538]: DEBUG nova.compute.manager [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Instance network_info: |[{"id": "9fa95b30-4f3b-4051-94dc-c49ea22b7275", "address": "fa:16:3e:0e:aa:c3", "network": {"id": "cdb9e991-6d54-4556-aee9-e21aa418c5a3", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-543465917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f7d0ccfeee04a17a6970f5dc7383f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fa95b30-4f", "ovs_interfaceid": "9fa95b30-4f3b-4051-94dc-c49ea22b7275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 849.617428] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:aa:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9fa95b30-4f3b-4051-94dc-c49ea22b7275', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 849.626196] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Creating folder: Project (3f7d0ccfeee04a17a6970f5dc7383f94). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 849.626557] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad61562c-4e25-4236-91cd-fce14ac10934 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.640234] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Created folder: Project (3f7d0ccfeee04a17a6970f5dc7383f94) in parent group-v992234. [ 849.640234] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Creating folder: Instances. Parent ref: group-v992403. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 849.640234] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-954014ce-f62b-4946-be8b-6e693a0f429c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.650731] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Created folder: Instances in parent group-v992403. [ 849.651848] env[63538]: DEBUG oslo.service.loopingcall [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 849.651848] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 849.651848] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53fb3323-1f2b-499f-b615-2fe40e083ade {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.672236] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 849.672236] env[63538]: value = "task-5101045" [ 849.672236] env[63538]: _type = "Task" [ 849.672236] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.680718] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101045, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.700363] env[63538]: DEBUG nova.compute.manager [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Received event network-changed-9fa95b30-4f3b-4051-94dc-c49ea22b7275 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 849.700363] env[63538]: DEBUG nova.compute.manager [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Refreshing instance network info cache due to event network-changed-9fa95b30-4f3b-4051-94dc-c49ea22b7275. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 849.700363] env[63538]: DEBUG oslo_concurrency.lockutils [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] Acquiring lock "refresh_cache-0e718984-cfce-4620-9be6-fdcfb4954da8" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.700363] env[63538]: DEBUG oslo_concurrency.lockutils [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] Acquired lock "refresh_cache-0e718984-cfce-4620-9be6-fdcfb4954da8" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.700363] env[63538]: DEBUG nova.network.neutron [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Refreshing network info cache for port 9fa95b30-4f3b-4051-94dc-c49ea22b7275 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 849.741366] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ef009e-0192-4067-938e-c571f3cdd9c3 tempest-ServersWithSpecificFlavorTestJSON-1003773190 tempest-ServersWithSpecificFlavorTestJSON-1003773190-project-member] Lock "5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.275s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.102374] env[63538]: DEBUG oslo_vmware.api [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101042, 'name': ReconfigVM_Task, 'duration_secs': 0.837579} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.102685] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Reconfigured VM instance instance-00000034 to attach disk [datastore2] volume-bd560018-1c74-4223-aa48-6dd904889923/volume-bd560018-1c74-4223-aa48-6dd904889923.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 850.107572] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a5c2af0-ad9a-45e8-bd9e-4f8069c4850b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.123945] env[63538]: DEBUG oslo_vmware.api [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 850.123945] env[63538]: value = "task-5101046" [ 850.123945] env[63538]: _type = "Task" [ 850.123945] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.133509] env[63538]: DEBUG oslo_vmware.api [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101046, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.182556] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101045, 'name': CreateVM_Task, 'duration_secs': 0.410754} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.182843] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 850.183864] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.184134] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.184545] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 850.184825] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a46d4d2-8f5b-43a6-971e-e1be25439f6e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.190432] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Waiting for the task: (returnval){ [ 850.190432] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523fe6c3-ded9-e715-a8ce-16be17d2a294" [ 850.190432] env[63538]: _type = "Task" [ 850.190432] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.199833] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523fe6c3-ded9-e715-a8ce-16be17d2a294, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.499498] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f886569-079c-4b46-a4eb-140e1bfa58e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.521379] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance 'a2e036ae-318b-44ea-9db0-10fa3838728b' progress to 0 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 850.533037] env[63538]: DEBUG nova.network.neutron [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Updated VIF entry in instance network info cache for port 9fa95b30-4f3b-4051-94dc-c49ea22b7275. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 850.533178] env[63538]: DEBUG nova.network.neutron [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Updating instance_info_cache with network_info: [{"id": "9fa95b30-4f3b-4051-94dc-c49ea22b7275", "address": "fa:16:3e:0e:aa:c3", "network": {"id": "cdb9e991-6d54-4556-aee9-e21aa418c5a3", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-543465917-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f7d0ccfeee04a17a6970f5dc7383f94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fa95b30-4f", "ovs_interfaceid": "9fa95b30-4f3b-4051-94dc-c49ea22b7275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.638444] env[63538]: DEBUG oslo_vmware.api [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101046, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.703240] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523fe6c3-ded9-e715-a8ce-16be17d2a294, 'name': SearchDatastore_Task, 'duration_secs': 0.035312} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.706570] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.707171] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 850.707656] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.707970] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.708601] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 850.710169] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-afa5ab4b-7070-45f0-90ef-a905530652f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.721657] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 850.721990] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 850.727028] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2602d04-eda1-483e-b14c-e18f411700ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.736482] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Waiting for the task: (returnval){ [ 850.736482] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52147040-5896-3fac-c305-62d5d0e4b98e" [ 850.736482] env[63538]: _type = "Task" [ 850.736482] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.744021] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3763787b-2fb6-4496-b3da-e158b6cbd063 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.757417] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52147040-5896-3fac-c305-62d5d0e4b98e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.761446] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99dbd811-ab2a-4201-ab61-0f3b0277285a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.809699] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-548b7630-472a-4518-a891-bd166a305215 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.821691] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d4686b-6dc8-4ba9-9685-80d6dadf9152 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.838107] env[63538]: DEBUG nova.compute.provider_tree [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.032225] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 851.032225] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9a07919-46aa-401e-b387-f2917ad90386 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.036543] env[63538]: DEBUG oslo_concurrency.lockutils [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service 
nova] Releasing lock "refresh_cache-0e718984-cfce-4620-9be6-fdcfb4954da8" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.037033] env[63538]: DEBUG nova.compute.manager [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Received event network-changed-696fc25d-fa83-4793-bffa-6bd2ce56f489 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 851.037362] env[63538]: DEBUG nova.compute.manager [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Refreshing instance network info cache due to event network-changed-696fc25d-fa83-4793-bffa-6bd2ce56f489. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 851.037703] env[63538]: DEBUG oslo_concurrency.lockutils [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] Acquiring lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.037953] env[63538]: DEBUG oslo_concurrency.lockutils [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] Acquired lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.038266] env[63538]: DEBUG nova.network.neutron [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Refreshing network info cache for port 696fc25d-fa83-4793-bffa-6bd2ce56f489 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 851.040906] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 851.040906] env[63538]: value = "task-5101047" [ 851.040906] env[63538]: _type = "Task" [ 851.040906] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.051316] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101047, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.136407] env[63538]: DEBUG oslo_vmware.api [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101046, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.251022] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52147040-5896-3fac-c305-62d5d0e4b98e, 'name': SearchDatastore_Task, 'duration_secs': 0.022121} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.251022] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-823d7f0f-0f19-48e2-a8ef-ba0581b472bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.256896] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Waiting for the task: (returnval){ [ 851.256896] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5206988b-72b1-71d3-4acf-6e9baa269ff1" [ 851.256896] env[63538]: _type = "Task" [ 851.256896] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.266130] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5206988b-72b1-71d3-4acf-6e9baa269ff1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.321586] env[63538]: DEBUG oslo_concurrency.lockutils [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.322722] env[63538]: DEBUG oslo_concurrency.lockutils [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.322722] env[63538]: INFO nova.compute.manager [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Rebooting instance [ 851.341237] env[63538]: DEBUG nova.scheduler.client.report [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 851.557246] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101047, 'name': PowerOffVM_Task, 'duration_secs': 0.516693} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.557555] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 851.557744] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance 'a2e036ae-318b-44ea-9db0-10fa3838728b' progress to 17 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 851.638856] env[63538]: DEBUG oslo_vmware.api [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101046, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.772860] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5206988b-72b1-71d3-4acf-6e9baa269ff1, 'name': SearchDatastore_Task, 'duration_secs': 0.009906} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.773213] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.773439] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 0e718984-cfce-4620-9be6-fdcfb4954da8/0e718984-cfce-4620-9be6-fdcfb4954da8.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 851.773722] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c2b24e4-e802-40be-a1ed-7c87fcf517fb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.781679] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Waiting for the task: (returnval){ [ 851.781679] env[63538]: value = "task-5101048" [ 851.781679] env[63538]: _type = "Task" [ 851.781679] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.791315] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101048, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.838258] env[63538]: DEBUG nova.network.neutron [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Updated VIF entry in instance network info cache for port 696fc25d-fa83-4793-bffa-6bd2ce56f489. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 851.838817] env[63538]: DEBUG nova.network.neutron [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Updating instance_info_cache with network_info: [{"id": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "address": "fa:16:3e:60:55:b3", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap696fc25d-fa", "ovs_interfaceid": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.847035] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.847563] env[63538]: DEBUG nova.compute.manager [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 851.850973] env[63538]: DEBUG oslo_concurrency.lockutils [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.851167] env[63538]: DEBUG oslo_concurrency.lockutils [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.851357] env[63538]: DEBUG nova.network.neutron [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 851.852563] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.676s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.853928] env[63538]: INFO nova.compute.claims [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 852.070773] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 852.071115] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 852.071298] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 852.071491] env[63538]: DEBUG nova.virt.hardware [None 
req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 852.071648] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 852.071803] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 852.072045] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 852.072277] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 852.072549] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 852.072743] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 852.072907] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 852.078633] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-866efe83-5f02-4ac6-9a4a-15aec3c7e6ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.095629] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 852.095629] env[63538]: value = "task-5101049" [ 852.095629] env[63538]: _type = "Task" [ 852.095629] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.105585] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101049, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.142585] env[63538]: DEBUG oslo_vmware.api [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101046, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.296279] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101048, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.341560] env[63538]: DEBUG oslo_concurrency.lockutils [req-2d622e0f-5933-483a-bc31-0a717123ee14 req-8ddf528d-9b8e-4a77-ab54-c0902cbfefeb service nova] Releasing lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.360980] env[63538]: DEBUG nova.compute.utils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 852.366465] env[63538]: DEBUG nova.compute.manager [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 852.366465] env[63538]: DEBUG nova.network.neutron [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 852.487498] env[63538]: DEBUG nova.policy [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87c19c9ce3594acd96c1c215ef8ea555', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '422f50dc66ec48b7b262643390072f3d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 852.622230] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101049, 'name': ReconfigVM_Task, 'duration_secs': 0.191041} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.622786] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance 'a2e036ae-318b-44ea-9db0-10fa3838728b' progress to 33 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 852.638843] env[63538]: DEBUG oslo_vmware.api [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101046, 'name': ReconfigVM_Task, 'duration_secs': 2.040898} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.639214] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992402', 'volume_id': 'bd560018-1c74-4223-aa48-6dd904889923', 'name': 'volume-bd560018-1c74-4223-aa48-6dd904889923', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd5d557c6-3d4e-4122-8756-218c9757fa01', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd560018-1c74-4223-aa48-6dd904889923', 'serial': 'bd560018-1c74-4223-aa48-6dd904889923'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 852.756077] env[63538]: DEBUG nova.network.neutron [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance_info_cache with network_info: [{"id": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "address": "fa:16:3e:08:aa:47", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa679ee9b-3e", "ovs_interfaceid": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.795017] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 
tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101048, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.839892} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.795017] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 0e718984-cfce-4620-9be6-fdcfb4954da8/0e718984-cfce-4620-9be6-fdcfb4954da8.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 852.795017] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 852.795428] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c22cad8-43f5-4caf-a80e-89e1d2973ffd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.803320] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Waiting for the task: (returnval){ [ 852.803320] env[63538]: value = "task-5101050" [ 852.803320] env[63538]: _type = "Task" [ 852.803320] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.817076] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101050, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.868243] env[63538]: DEBUG nova.compute.manager [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 853.045776] env[63538]: DEBUG nova.network.neutron [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Successfully created port: a4df30fe-8921-4a26-89d6-e478ba9e2608 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 853.131022] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:52:22Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='2a0f5711-293c-4327-a7c3-091f85550bf8',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-805814405',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 853.131022] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 853.131022] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.131022] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 853.131022] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.131022] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 853.131022] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 853.131381] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 
tempest-MigrationsAdminTest-713495968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 853.131687] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 853.131974] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 853.132319] env[63538]: DEBUG nova.virt.hardware [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.138802] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Reconfiguring VM instance instance-00000033 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 853.141709] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56e0919e-2a85-4ff3-a93d-9e62cb5992ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.168226] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 853.168226] env[63538]: value = "task-5101051" [ 853.168226] env[63538]: _type = "Task" [ 853.168226] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.178562] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101051, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.258314] env[63538]: DEBUG oslo_concurrency.lockutils [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.260606] env[63538]: DEBUG nova.compute.manager [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 853.262093] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033f4ca9-f530-4b9e-b4f0-fb3a8643f3b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.320114] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101050, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074484} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.320468] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 853.321319] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93fc599-bd72-48c8-8e3a-f30286eb39a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.348464] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] 0e718984-cfce-4620-9be6-fdcfb4954da8/0e718984-cfce-4620-9be6-fdcfb4954da8.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 853.352216] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ee10acb-b682-4fb9-98c9-cec7204b8bee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.375200] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Waiting for the task: (returnval){ [ 853.375200] env[63538]: value = "task-5101052" [ 853.375200] env[63538]: _type = "Task" [ 853.375200] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.390034] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101052, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.465357] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3d8b59-d7a0-4c1e-8d46-cffc32c3e009 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.473858] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-773418e3-ad73-464f-ac3a-27d4a6ea1a79 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.505466] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac74fa01-ddea-48fc-8a10-8d9b260e7c75 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.513540] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e886726e-a52b-4843-b6d6-c1a7d533c448 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.527911] env[63538]: DEBUG nova.compute.provider_tree [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.554493] env[63538]: DEBUG oslo_concurrency.lockutils [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.554764] env[63538]: DEBUG oslo_concurrency.lockutils [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.554978] env[63538]: DEBUG oslo_concurrency.lockutils [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.555184] env[63538]: DEBUG oslo_concurrency.lockutils [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock 
"f9fa5578-acf3-416f-9cb0-8ceb00e5132d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.555360] env[63538]: DEBUG oslo_concurrency.lockutils [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.557583] env[63538]: INFO nova.compute.manager [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Terminating instance [ 853.559420] env[63538]: DEBUG nova.compute.manager [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 853.559620] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 853.560457] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09508bf-27ba-455d-9839-becee90c0339 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.572836] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 853.573116] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5203c268-0fec-4882-97c5-9b67aad3908a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.585875] env[63538]: DEBUG oslo_vmware.api [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 853.585875] env[63538]: value = "task-5101053" [ 853.585875] env[63538]: _type = "Task" [ 853.585875] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.599800] env[63538]: DEBUG oslo_vmware.api [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101053, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.680431] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101051, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.718919] env[63538]: DEBUG nova.objects.instance [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'flavor' on Instance uuid d5d557c6-3d4e-4122-8756-218c9757fa01 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.888387] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101052, 'name': ReconfigVM_Task, 'duration_secs': 0.32131} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.888819] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Reconfigured VM instance instance-0000003d to attach disk [datastore2] 0e718984-cfce-4620-9be6-fdcfb4954da8/0e718984-cfce-4620-9be6-fdcfb4954da8.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 853.889548] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c23119ad-0a1f-4f38-87c8-8bb98e3f73da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.892172] env[63538]: DEBUG nova.compute.manager [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 853.901431] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Waiting for the task: (returnval){ [ 853.901431] env[63538]: value = "task-5101054" [ 853.901431] env[63538]: _type = "Task" [ 853.901431] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.915089] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101054, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.928918] env[63538]: DEBUG nova.virt.hardware [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 853.929699] env[63538]: DEBUG nova.virt.hardware [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 853.929699] env[63538]: DEBUG nova.virt.hardware [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.930073] env[63538]: DEBUG nova.virt.hardware [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 853.930651] env[63538]: DEBUG nova.virt.hardware [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.930651] env[63538]: DEBUG nova.virt.hardware [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 853.932124] env[63538]: DEBUG nova.virt.hardware [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 853.932480] env[63538]: DEBUG nova.virt.hardware [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 853.933803] env[63538]: DEBUG nova.virt.hardware [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 
tempest-ImagesTestJSON-1517434018-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 853.933803] env[63538]: DEBUG nova.virt.hardware [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 853.933803] env[63538]: DEBUG nova.virt.hardware [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.934979] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e680ae-38c2-4b35-9944-0c4b4b1655bd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.944552] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fedfc244-1b8b-4220-b5af-63a243c68186 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.031610] env[63538]: DEBUG nova.scheduler.client.report [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 854.097435] env[63538]: DEBUG oslo_vmware.api [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101053, 'name': PowerOffVM_Task, 'duration_secs': 0.324429} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.097756] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 854.097901] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 854.098191] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85be93cb-5281-4bb9-938a-11fb953a77b8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.177222] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 854.177613] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 854.177931] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Deleting the datastore file [datastore1] f9fa5578-acf3-416f-9cb0-8ceb00e5132d {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 854.179723] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8403d1eb-a46b-4cf3-b039-422cdfe4cb3b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.188211] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101051, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.194502] env[63538]: DEBUG oslo_vmware.api [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for the task: (returnval){ [ 854.194502] env[63538]: value = "task-5101056" [ 854.194502] env[63538]: _type = "Task" [ 854.194502] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.207486] env[63538]: DEBUG oslo_vmware.api [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101056, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.226848] env[63538]: DEBUG oslo_concurrency.lockutils [None req-40ee0917-c89f-4a93-97b8-0103440919d5 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 10.376s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.285932] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73f43c5-040c-48ff-ae6e-32d3013e8500 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.293734] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Doing hard reboot of VM {{(pid=63538) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1064}} [ 854.294053] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-44c13a48-90e4-4561-b7c6-d735ccfe471f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.301886] env[63538]: DEBUG oslo_vmware.api [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 854.301886] env[63538]: value = "task-5101057" [ 854.301886] env[63538]: _type = "Task" [ 854.301886] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.313394] env[63538]: DEBUG oslo_vmware.api [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101057, 'name': ResetVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.419586] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101054, 'name': Rename_Task, 'duration_secs': 0.181555} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.422883] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 854.422883] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c1c54b9-9824-42b6-9711-8b2332a08204 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.430047] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Waiting for the task: (returnval){ [ 854.430047] env[63538]: value = "task-5101058" [ 854.430047] env[63538]: _type = "Task" [ 854.430047] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.443042] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101058, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.490869] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0ee5d68-8413-4952-bd28-93c0fc94b513 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "d5d557c6-3d4e-4122-8756-218c9757fa01" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.491201] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0ee5d68-8413-4952-bd28-93c0fc94b513 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.491429] env[63538]: DEBUG nova.compute.manager [None req-c0ee5d68-8413-4952-bd28-93c0fc94b513 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 854.492573] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88ad7f4-a246-4dd5-84cf-bf52440a6445 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.501270] env[63538]: DEBUG nova.compute.manager [None req-c0ee5d68-8413-4952-bd28-93c0fc94b513 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63538) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 854.503541] env[63538]: DEBUG nova.objects.instance [None 
req-c0ee5d68-8413-4952-bd28-93c0fc94b513 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'flavor' on Instance uuid d5d557c6-3d4e-4122-8756-218c9757fa01 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 854.540025] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.540025] env[63538]: DEBUG nova.compute.manager [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 854.541942] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.035s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.542358] env[63538]: DEBUG nova.objects.instance [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Lazy-loading 'resources' on Instance uuid f703cd1c-4b77-4a85-a91b-63a2bd0e84a9 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 854.684069] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101051, 'name': ReconfigVM_Task, 'duration_secs': 1.420547} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.687640] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Reconfigured VM instance instance-00000033 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 854.688372] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577b0dfd-6935-4019-a145-93e7b29ed347 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.715596] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] a2e036ae-318b-44ea-9db0-10fa3838728b/a2e036ae-318b-44ea-9db0-10fa3838728b.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 854.720041] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf021493-88b0-47f8-98a4-a82865226063 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.736326] env[63538]: DEBUG nova.compute.manager [req-fe1be788-0400-4ee8-8c35-1edd093d5cb9 req-d7880a80-f862-486e-aee7-e63269e5d901 service nova] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Received event network-vif-plugged-a4df30fe-8921-4a26-89d6-e478ba9e2608 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 854.736326] env[63538]: DEBUG oslo_concurrency.lockutils [req-fe1be788-0400-4ee8-8c35-1edd093d5cb9 req-d7880a80-f862-486e-aee7-e63269e5d901 service nova] Acquiring lock "ade3cce6-5662-4199-96f4-398436f840d8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.736461] env[63538]: DEBUG oslo_concurrency.lockutils [req-fe1be788-0400-4ee8-8c35-1edd093d5cb9 req-d7880a80-f862-486e-aee7-e63269e5d901 service nova] Lock "ade3cce6-5662-4199-96f4-398436f840d8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.736591] env[63538]: DEBUG oslo_concurrency.lockutils [req-fe1be788-0400-4ee8-8c35-1edd093d5cb9 req-d7880a80-f862-486e-aee7-e63269e5d901 service nova] Lock "ade3cce6-5662-4199-96f4-398436f840d8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.736816] env[63538]: DEBUG nova.compute.manager [req-fe1be788-0400-4ee8-8c35-1edd093d5cb9 req-d7880a80-f862-486e-aee7-e63269e5d901 service nova] [instance: ade3cce6-5662-4199-96f4-398436f840d8] No waiting events found dispatching network-vif-plugged-a4df30fe-8921-4a26-89d6-e478ba9e2608 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 854.737071] env[63538]: WARNING nova.compute.manager 
[req-fe1be788-0400-4ee8-8c35-1edd093d5cb9 req-d7880a80-f862-486e-aee7-e63269e5d901 service nova] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Received unexpected event network-vif-plugged-a4df30fe-8921-4a26-89d6-e478ba9e2608 for instance with vm_state building and task_state spawning. [ 854.744075] env[63538]: DEBUG oslo_vmware.api [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Task: {'id': task-5101056, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207018} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.745737] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 854.745948] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 854.746145] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 854.746382] env[63538]: INFO nova.compute.manager [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Took 1.19 seconds to destroy the instance on the hypervisor. [ 854.746569] env[63538]: DEBUG oslo.service.loopingcall [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 854.746840] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 854.746840] env[63538]: value = "task-5101059" [ 854.746840] env[63538]: _type = "Task" [ 854.746840] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.747052] env[63538]: DEBUG nova.compute.manager [-] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 854.747151] env[63538]: DEBUG nova.network.neutron [-] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 854.817097] env[63538]: DEBUG oslo_vmware.api [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101057, 'name': ResetVM_Task, 'duration_secs': 0.147674} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.818144] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Did hard reboot of VM {{(pid=63538) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1068}} [ 854.818144] env[63538]: DEBUG nova.compute.manager [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 854.821129] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c17885a-92a2-4e32-b712-1f0646f515e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.854903] env[63538]: DEBUG nova.network.neutron [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Successfully updated port: a4df30fe-8921-4a26-89d6-e478ba9e2608 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 854.943987] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101058, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.007863] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0ee5d68-8413-4952-bd28-93c0fc94b513 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 855.008177] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be98e77a-7c86-4e7a-840f-000acaf3fc26 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.017425] env[63538]: DEBUG oslo_vmware.api [None req-c0ee5d68-8413-4952-bd28-93c0fc94b513 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 855.017425] env[63538]: value = "task-5101060" [ 855.017425] env[63538]: _type = "Task" [ 855.017425] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.030777] env[63538]: DEBUG oslo_vmware.api [None req-c0ee5d68-8413-4952-bd28-93c0fc94b513 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.046207] env[63538]: DEBUG nova.compute.utils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 855.051399] env[63538]: DEBUG nova.compute.manager [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 855.052184] env[63538]: DEBUG nova.network.neutron [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 855.128051] env[63538]: DEBUG nova.policy [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16fdc041f4c74e0ea76ee8984f9786f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a701618902d411b8af203fdbb1069be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 855.264732] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101059, 'name': ReconfigVM_Task, 'duration_secs': 0.411433} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.265931] env[63538]: DEBUG nova.compute.manager [req-5434d8a7-2b70-46d5-ae2c-77c6202269a4 req-39e2487c-6277-41c6-8bc7-800100d8f32d service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Received event network-vif-deleted-a5dfe48b-4acc-472c-8e00-f936b4068ea5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 855.266147] env[63538]: INFO nova.compute.manager [req-5434d8a7-2b70-46d5-ae2c-77c6202269a4 req-39e2487c-6277-41c6-8bc7-800100d8f32d service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Neutron deleted interface a5dfe48b-4acc-472c-8e00-f936b4068ea5; detaching it from the instance and deleting it from the info cache [ 855.266327] env[63538]: DEBUG nova.network.neutron [req-5434d8a7-2b70-46d5-ae2c-77c6202269a4 req-39e2487c-6277-41c6-8bc7-800100d8f32d service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.267520] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Reconfigured VM instance instance-00000033 to attach disk [datastore1] a2e036ae-318b-44ea-9db0-10fa3838728b/a2e036ae-318b-44ea-9db0-10fa3838728b.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 855.271197] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance 'a2e036ae-318b-44ea-9db0-10fa3838728b' progress to 50 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 855.339735] 
env[63538]: DEBUG oslo_concurrency.lockutils [None req-23c0240f-efc7-4926-945f-b875204bc3ac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.017s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.358050] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "refresh_cache-ade3cce6-5662-4199-96f4-398436f840d8" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.358050] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "refresh_cache-ade3cce6-5662-4199-96f4-398436f840d8" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.358205] env[63538]: DEBUG nova.network.neutron [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 855.448414] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101058, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.532781] env[63538]: DEBUG oslo_vmware.api [None req-c0ee5d68-8413-4952-bd28-93c0fc94b513 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101060, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.556161] env[63538]: DEBUG nova.compute.manager [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 855.602805] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5709337c-16a8-4a4a-a5a7-473b99bede8f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.614016] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f77198d-50de-4141-b797-22078650ee7e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.660425] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c61fd53-8cb9-4374-a5f1-903503fe46f8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.669400] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc052603-09ec-43e4-854e-54857a459e28 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.686632] env[63538]: DEBUG nova.compute.provider_tree [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.687646] env[63538]: DEBUG nova.network.neutron [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Successfully created port: 2906e927-8bc7-4651-b391-e6a376f3208b {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 855.746337] env[63538]: DEBUG nova.network.neutron [-] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.777859] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36f30802-f485-45d5-b8fa-bd5d7f371957 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.780917] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf772895-0c6e-40bc-b8e5-4116a9524099 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.813977] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807778a0-184c-498e-9bcd-338f29c26474 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.823770] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2976ce-baed-4ddd-ab4d-5b510f1eda85 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.857995] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance 
'a2e036ae-318b-44ea-9db0-10fa3838728b' progress to 67 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 855.878759] env[63538]: DEBUG nova.compute.manager [req-5434d8a7-2b70-46d5-ae2c-77c6202269a4 req-39e2487c-6277-41c6-8bc7-800100d8f32d service nova] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Detach interface failed, port_id=a5dfe48b-4acc-472c-8e00-f936b4068ea5, reason: Instance f9fa5578-acf3-416f-9cb0-8ceb00e5132d could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 855.942652] env[63538]: DEBUG oslo_vmware.api [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101058, 'name': PowerOnVM_Task, 'duration_secs': 1.06877} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.943470] env[63538]: DEBUG nova.network.neutron [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 855.945415] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 855.945782] env[63538]: INFO nova.compute.manager [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Took 9.35 seconds to spawn the instance on the hypervisor. [ 855.945857] env[63538]: DEBUG nova.compute.manager [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 855.946730] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27540a2e-55fb-4a09-a299-5192fc92fbe9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.030354] env[63538]: DEBUG oslo_vmware.api [None req-c0ee5d68-8413-4952-bd28-93c0fc94b513 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101060, 'name': PowerOffVM_Task, 'duration_secs': 0.596856} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.030660] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0ee5d68-8413-4952-bd28-93c0fc94b513 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 856.030872] env[63538]: DEBUG nova.compute.manager [None req-c0ee5d68-8413-4952-bd28-93c0fc94b513 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 856.031894] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98b3ed9-aa54-4e9f-9360-8cec93c2e498 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.190173] env[63538]: DEBUG nova.network.neutron [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Updating instance_info_cache with network_info: [{"id": "a4df30fe-8921-4a26-89d6-e478ba9e2608", "address": "fa:16:3e:df:8a:cb", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4df30fe-89", "ovs_interfaceid": "a4df30fe-8921-4a26-89d6-e478ba9e2608", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.194894] env[63538]: DEBUG nova.scheduler.client.report [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 856.249142] env[63538]: INFO nova.compute.manager [-] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Took 1.50 seconds to deallocate network for instance. 
[ 856.440225] env[63538]: DEBUG nova.network.neutron [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Port 11d7dbc5-d269-456b-9a7a-601759e64b51 binding to destination host cpu-1 is already ACTIVE {{(pid=63538) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 856.468255] env[63538]: INFO nova.compute.manager [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Took 59.86 seconds to build instance. [ 856.549032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0ee5d68-8413-4952-bd28-93c0fc94b513 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.057s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.573060] env[63538]: DEBUG nova.compute.manager [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 856.608159] env[63538]: DEBUG nova.virt.hardware [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 856.608159] env[63538]: DEBUG nova.virt.hardware [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 856.608159] env[63538]: DEBUG nova.virt.hardware [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 856.608159] env[63538]: DEBUG nova.virt.hardware [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
856.608159] env[63538]: DEBUG nova.virt.hardware [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 856.608159] env[63538]: DEBUG nova.virt.hardware [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 856.608159] env[63538]: DEBUG nova.virt.hardware [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 856.608159] env[63538]: DEBUG nova.virt.hardware [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 856.608159] env[63538]: DEBUG nova.virt.hardware [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 856.608159] env[63538]: DEBUG nova.virt.hardware [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 856.608159] env[63538]: DEBUG nova.virt.hardware [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 856.610104] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603383af-3edf-472b-be95-aa8712792075 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.621124] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad32ca7e-d198-46ee-bbe1-daa0d5c0d8ac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.694044] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "refresh_cache-ade3cce6-5662-4199-96f4-398436f840d8" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.694497] env[63538]: DEBUG nova.compute.manager [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 
tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Instance network_info: |[{"id": "a4df30fe-8921-4a26-89d6-e478ba9e2608", "address": "fa:16:3e:df:8a:cb", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4df30fe-89", "ovs_interfaceid": "a4df30fe-8921-4a26-89d6-e478ba9e2608", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 856.694982] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:8a:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f972c061-0cd5-4aed-8cfb-42cc4a08835a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4df30fe-8921-4a26-89d6-e478ba9e2608', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.702770] env[63538]: DEBUG oslo.service.loopingcall [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.703524] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.162s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.706307] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 856.706940] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.692s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.708487] env[63538]: INFO nova.compute.claims [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 856.711327] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b62fe757-57dc-4bd5-aeb0-b3a3c3f1e94c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.732848] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.732848] env[63538]: value = "task-5101061" [ 856.732848] env[63538]: _type = "Task" [ 856.732848] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.737503] env[63538]: INFO nova.scheduler.client.report [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Deleted allocations for instance f703cd1c-4b77-4a85-a91b-63a2bd0e84a9 [ 856.742151] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101061, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.758341] env[63538]: DEBUG oslo_concurrency.lockutils [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.902496] env[63538]: DEBUG nova.compute.manager [req-d5bd0ca8-8f8f-4196-a02f-663235ddc2b0 req-fa1b3b2e-f529-47ae-92b0-09469e99dea9 service nova] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Received event network-changed-a4df30fe-8921-4a26-89d6-e478ba9e2608 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 856.902496] env[63538]: DEBUG nova.compute.manager [req-d5bd0ca8-8f8f-4196-a02f-663235ddc2b0 req-fa1b3b2e-f529-47ae-92b0-09469e99dea9 service nova] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Refreshing instance network info cache due to event network-changed-a4df30fe-8921-4a26-89d6-e478ba9e2608. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 856.902880] env[63538]: DEBUG oslo_concurrency.lockutils [req-d5bd0ca8-8f8f-4196-a02f-663235ddc2b0 req-fa1b3b2e-f529-47ae-92b0-09469e99dea9 service nova] Acquiring lock "refresh_cache-ade3cce6-5662-4199-96f4-398436f840d8" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.902880] env[63538]: DEBUG oslo_concurrency.lockutils [req-d5bd0ca8-8f8f-4196-a02f-663235ddc2b0 req-fa1b3b2e-f529-47ae-92b0-09469e99dea9 service nova] Acquired lock "refresh_cache-ade3cce6-5662-4199-96f4-398436f840d8" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.903054] env[63538]: DEBUG nova.network.neutron [req-d5bd0ca8-8f8f-4196-a02f-663235ddc2b0 req-fa1b3b2e-f529-47ae-92b0-09469e99dea9 service nova] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Refreshing network info cache for port a4df30fe-8921-4a26-89d6-e478ba9e2608 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 856.972737] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cf4417b5-bce3-4439-894d-74e2f4cc849a tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Lock "0e718984-cfce-4620-9be6-fdcfb4954da8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.747s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.115434] env[63538]: DEBUG oslo_concurrency.lockutils [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Acquiring lock "0e718984-cfce-4620-9be6-fdcfb4954da8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.115796] env[63538]: DEBUG oslo_concurrency.lockutils [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Lock "0e718984-cfce-4620-9be6-fdcfb4954da8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s 
{{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.115933] env[63538]: DEBUG oslo_concurrency.lockutils [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Acquiring lock "0e718984-cfce-4620-9be6-fdcfb4954da8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.116148] env[63538]: DEBUG oslo_concurrency.lockutils [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Lock "0e718984-cfce-4620-9be6-fdcfb4954da8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.116326] env[63538]: DEBUG oslo_concurrency.lockutils [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Lock "0e718984-cfce-4620-9be6-fdcfb4954da8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.119070] env[63538]: INFO nova.compute.manager [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Terminating instance [ 857.121091] env[63538]: DEBUG nova.compute.manager [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 857.121294] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 857.122140] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191fd920-d51c-4cf2-90e4-c54ac39fea7e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.130809] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 857.131187] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7fbc8bb-dae3-42e4-bd43-720b773682ad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.139544] env[63538]: DEBUG oslo_vmware.api [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Waiting for the task: (returnval){ [ 857.139544] env[63538]: value = "task-5101062" [ 857.139544] env[63538]: _type = "Task" [ 857.139544] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.143908] env[63538]: DEBUG nova.objects.instance [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'flavor' on Instance uuid d5d557c6-3d4e-4122-8756-218c9757fa01 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 857.151075] env[63538]: DEBUG oslo_vmware.api [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101062, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.243280] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101061, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.250287] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0f6c5de-c7e8-4fc3-a5e0-c41e3efd7079 tempest-ServerGroupTestJSON-1945963659 tempest-ServerGroupTestJSON-1945963659-project-member] Lock "f703cd1c-4b77-4a85-a91b-63a2bd0e84a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.655s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.469929] env[63538]: DEBUG oslo_concurrency.lockutils [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "a2e036ae-318b-44ea-9db0-10fa3838728b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.470109] env[63538]: DEBUG oslo_concurrency.lockutils [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "a2e036ae-318b-44ea-9db0-10fa3838728b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.470292] env[63538]: DEBUG oslo_concurrency.lockutils [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "a2e036ae-318b-44ea-9db0-10fa3838728b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.477467] env[63538]: DEBUG nova.compute.manager [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 857.490463] env[63538]: DEBUG nova.network.neutron [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Successfully updated port: 2906e927-8bc7-4651-b391-e6a376f3208b {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 857.653923] env[63538]: DEBUG oslo_vmware.api [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101062, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.655057] env[63538]: DEBUG oslo_concurrency.lockutils [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.655676] env[63538]: DEBUG oslo_concurrency.lockutils [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquired lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.655676] env[63538]: DEBUG nova.network.neutron [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 857.655676] env[63538]: DEBUG nova.objects.instance [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'info_cache' on Instance uuid d5d557c6-3d4e-4122-8756-218c9757fa01 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 857.698551] env[63538]: DEBUG nova.network.neutron [req-d5bd0ca8-8f8f-4196-a02f-663235ddc2b0 req-fa1b3b2e-f529-47ae-92b0-09469e99dea9 service nova] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Updated VIF entry in instance network info cache for port a4df30fe-8921-4a26-89d6-e478ba9e2608. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 857.698551] env[63538]: DEBUG nova.network.neutron [req-d5bd0ca8-8f8f-4196-a02f-663235ddc2b0 req-fa1b3b2e-f529-47ae-92b0-09469e99dea9 service nova] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Updating instance_info_cache with network_info: [{"id": "a4df30fe-8921-4a26-89d6-e478ba9e2608", "address": "fa:16:3e:df:8a:cb", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4df30fe-89", "ovs_interfaceid": "a4df30fe-8921-4a26-89d6-e478ba9e2608", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.744755] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101061, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.993740] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "refresh_cache-9c1f7da8-59f6-45bc-8d5f-23c8ec760829" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.993740] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "refresh_cache-9c1f7da8-59f6-45bc-8d5f-23c8ec760829" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.994047] env[63538]: DEBUG nova.network.neutron [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 858.002171] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.152800] env[63538]: DEBUG oslo_vmware.api [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 
tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101062, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.159180] env[63538]: DEBUG nova.objects.base [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 858.200831] env[63538]: DEBUG oslo_concurrency.lockutils [req-d5bd0ca8-8f8f-4196-a02f-663235ddc2b0 req-fa1b3b2e-f529-47ae-92b0-09469e99dea9 service nova] Releasing lock "refresh_cache-ade3cce6-5662-4199-96f4-398436f840d8" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.247812] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101061, 'name': CreateVM_Task, 'duration_secs': 1.456935} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.249177] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 858.249827] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e929dbc-2eeb-4475-9c77-15b02d8a25d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.255589] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.255838] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.256635] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 858.256635] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d677d142-bf9d-4a12-bbac-ee2d824c6b72 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.266340] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b959aaeb-9b76-47db-a0c1-4db2739502cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.270237] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 
858.270237] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524154af-ffce-150a-a685-0d2e72ead6fe" [ 858.270237] env[63538]: _type = "Task" [ 858.270237] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.304444] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b75900-fbc1-4723-b4db-64dea3160c04 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.311545] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524154af-ffce-150a-a685-0d2e72ead6fe, 'name': SearchDatastore_Task, 'duration_secs': 0.014235} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.312408] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.312612] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 858.313136] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.313308] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.314067] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 858.314067] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d056b3e7-01b2-48bc-9d51-41cc9c7a0698 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.320233] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d593af-04dc-4f13-b2d4-7d479792883d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.326914] env[63538]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 858.327164] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 858.335977] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eecd727f-3205-420d-8fee-4e06987c5ae7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.339911] env[63538]: DEBUG nova.compute.provider_tree [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.345555] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 858.345555] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d5bf65-fe8a-0a57-9684-c65c3bea9d95" [ 858.345555] env[63538]: _type = "Task" [ 858.345555] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.354956] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d5bf65-fe8a-0a57-9684-c65c3bea9d95, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.555299] env[63538]: DEBUG oslo_concurrency.lockutils [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.555476] env[63538]: DEBUG oslo_concurrency.lockutils [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.555659] env[63538]: DEBUG nova.network.neutron [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 858.627623] env[63538]: DEBUG nova.network.neutron [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.650957] env[63538]: DEBUG oslo_vmware.api [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101062, 'name': PowerOffVM_Task, 'duration_secs': 1.211954} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.651264] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 858.651442] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 858.651706] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe469797-848f-4bfa-b45e-3a30b4f473c4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.727804] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 858.728150] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 858.728862] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Deleting the datastore file [datastore2] 0e718984-cfce-4620-9be6-fdcfb4954da8 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 858.728862] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1cd96fe1-b8ad-4629-9c61-64f81b95e1ad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.738432] env[63538]: DEBUG oslo_vmware.api [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Waiting for the task: (returnval){ [ 858.738432] env[63538]: value = "task-5101064" [ 858.738432] env[63538]: _type = "Task" [ 858.738432] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.747884] env[63538]: DEBUG oslo_vmware.api [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101064, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.845023] env[63538]: DEBUG nova.scheduler.client.report [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 858.860448] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d5bf65-fe8a-0a57-9684-c65c3bea9d95, 'name': SearchDatastore_Task, 'duration_secs': 0.017595} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.865362] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9793e430-a65d-4bc4-8ae5-a780f9255c34 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.873085] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 858.873085] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52de6e29-bf3e-31c3-bab7-92ec7bbe5410" [ 858.873085] env[63538]: _type = "Task" [ 858.873085] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.883175] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52de6e29-bf3e-31c3-bab7-92ec7bbe5410, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.922359] env[63538]: DEBUG nova.network.neutron [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Updating instance_info_cache with network_info: [{"id": "2906e927-8bc7-4651-b391-e6a376f3208b", "address": "fa:16:3e:a6:90:3e", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2906e927-8b", "ovs_interfaceid": "2906e927-8bc7-4651-b391-e6a376f3208b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.948484] env[63538]: DEBUG nova.compute.manager [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Received event network-vif-plugged-2906e927-8bc7-4651-b391-e6a376f3208b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 858.948710] env[63538]: DEBUG oslo_concurrency.lockutils [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] Acquiring lock "9c1f7da8-59f6-45bc-8d5f-23c8ec760829-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.948937] env[63538]: DEBUG oslo_concurrency.lockutils [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] Lock "9c1f7da8-59f6-45bc-8d5f-23c8ec760829-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.954991] env[63538]: DEBUG oslo_concurrency.lockutils [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] Lock "9c1f7da8-59f6-45bc-8d5f-23c8ec760829-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.954991] env[63538]: DEBUG nova.compute.manager [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] No waiting events found dispatching network-vif-plugged-2906e927-8bc7-4651-b391-e6a376f3208b 
{{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 858.954991] env[63538]: WARNING nova.compute.manager [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Received unexpected event network-vif-plugged-2906e927-8bc7-4651-b391-e6a376f3208b for instance with vm_state building and task_state spawning. [ 858.954991] env[63538]: DEBUG nova.compute.manager [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Received event network-changed-2906e927-8bc7-4651-b391-e6a376f3208b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 858.955246] env[63538]: DEBUG nova.compute.manager [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Refreshing instance network info cache due to event network-changed-2906e927-8bc7-4651-b391-e6a376f3208b. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 858.955342] env[63538]: DEBUG oslo_concurrency.lockutils [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] Acquiring lock "refresh_cache-9c1f7da8-59f6-45bc-8d5f-23c8ec760829" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.969309] env[63538]: DEBUG nova.network.neutron [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Updating instance_info_cache with network_info: [{"id": "0d48de93-8e4f-4795-a582-f00e76e60047", "address": "fa:16:3e:ca:bf:a1", "network": {"id": "955bc6e2-cea0-4cf9-80fb-521ae0e565f0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-726504029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9427981aac124f6aa0c4d8d45b0ae917", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c297fe21-cd0b-4226-813b-a65d2358d034", "external-id": "nsx-vlan-transportzone-98", "segmentation_id": 98, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d48de93-8e", "ovs_interfaceid": "0d48de93-8e4f-4795-a582-f00e76e60047", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.249734] env[63538]: DEBUG oslo_vmware.api [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Task: {'id': task-5101064, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.418776} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.252569] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 859.252816] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 859.252979] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 859.253187] env[63538]: INFO nova.compute.manager [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Took 2.13 seconds to destroy the instance on the hypervisor. [ 859.254342] env[63538]: DEBUG oslo.service.loopingcall [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 859.254712] env[63538]: DEBUG nova.compute.manager [-] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 859.254712] env[63538]: DEBUG nova.network.neutron [-] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 859.351996] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.352340] env[63538]: DEBUG nova.compute.manager [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 859.355179] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.256s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.356822] env[63538]: INFO nova.compute.claims [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.385016] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52de6e29-bf3e-31c3-bab7-92ec7bbe5410, 'name': SearchDatastore_Task, 'duration_secs': 0.0122} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.386038] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.387143] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] ade3cce6-5662-4199-96f4-398436f840d8/ade3cce6-5662-4199-96f4-398436f840d8.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 859.387464] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-796d725b-25f0-4253-a47a-1b7d399b2702 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.399182] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 859.399182] env[63538]: value = "task-5101065" [ 859.399182] env[63538]: _type = "Task" [ 859.399182] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.411465] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101065, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.425906] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "refresh_cache-9c1f7da8-59f6-45bc-8d5f-23c8ec760829" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.425906] env[63538]: DEBUG nova.compute.manager [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Instance network_info: |[{"id": "2906e927-8bc7-4651-b391-e6a376f3208b", "address": "fa:16:3e:a6:90:3e", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2906e927-8b", "ovs_interfaceid": "2906e927-8bc7-4651-b391-e6a376f3208b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 859.425906] env[63538]: DEBUG oslo_concurrency.lockutils [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] Acquired lock "refresh_cache-9c1f7da8-59f6-45bc-8d5f-23c8ec760829" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.426312] env[63538]: DEBUG nova.network.neutron [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Refreshing network info cache for port 2906e927-8bc7-4651-b391-e6a376f3208b {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 859.427418] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:90:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2906e927-8bc7-4651-b391-e6a376f3208b', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 859.441320] env[63538]: DEBUG oslo.service.loopingcall [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 
tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 859.447327] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 859.447327] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36a45ff2-3f4f-470d-b2d2-2bb952d176ae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.471230] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 859.471230] env[63538]: value = "task-5101066" [ 859.471230] env[63538]: _type = "Task" [ 859.471230] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.475632] env[63538]: DEBUG oslo_concurrency.lockutils [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Releasing lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.484777] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101066, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.485965] env[63538]: DEBUG nova.network.neutron [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance_info_cache with network_info: [{"id": "11d7dbc5-d269-456b-9a7a-601759e64b51", "address": "fa:16:3e:45:35:dd", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.81", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11d7dbc5-d2", "ovs_interfaceid": "11d7dbc5-d269-456b-9a7a-601759e64b51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.866400] env[63538]: DEBUG nova.compute.utils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 859.869844] env[63538]: DEBUG nova.compute.manager [None req-5f831984-e315-460d-b7f5-c4963c2e21bc 
tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 859.870349] env[63538]: DEBUG nova.network.neutron [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 859.918726] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101065, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.979560] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 859.979941] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-961cc69a-44ce-4690-97ba-bbfe5237117d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.985413] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101066, 'name': CreateVM_Task, 'duration_secs': 0.417933} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.986705] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 859.986835] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.987143] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.987685] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 859.987887] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fb25faf-d0f4-4cd9-8ad3-e2de3038537f {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.992349] env[63538]: DEBUG oslo_concurrency.lockutils [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.996593] env[63538]: DEBUG oslo_vmware.api [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 859.996593] env[63538]: value = "task-5101067" [ 859.996593] env[63538]: _type = "Task" [ 859.996593] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.003792] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 860.003792] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cec30d-b37c-d686-9b43-e195badfeccb" [ 860.003792] env[63538]: _type = "Task" [ 860.003792] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.014168] env[63538]: DEBUG nova.policy [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aed31227672c4777b9d58e323e95c82b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cd518094a084fc0be66f9a90ac2ad11', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 860.016124] env[63538]: DEBUG oslo_vmware.api [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101067, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.023574] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cec30d-b37c-d686-9b43-e195badfeccb, 'name': SearchDatastore_Task, 'duration_secs': 0.016863} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.023985] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.025264] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 860.025264] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.025264] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.025264] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 860.025264] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5469174-704e-4099-94db-d76f17606f18 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.038261] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 860.038413] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 860.040524] env[63538]: DEBUG nova.network.neutron [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Updated VIF entry in instance network info cache for port 2906e927-8bc7-4651-b391-e6a376f3208b. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 860.041088] env[63538]: DEBUG nova.network.neutron [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Updating instance_info_cache with network_info: [{"id": "2906e927-8bc7-4651-b391-e6a376f3208b", "address": "fa:16:3e:a6:90:3e", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2906e927-8b", "ovs_interfaceid": "2906e927-8bc7-4651-b391-e6a376f3208b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.043607] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04846d38-86d9-4c49-baaa-21a706906df2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.051647] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 860.051647] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527595db-40b5-8ab2-c8b2-4f0c2137abe0" [ 860.051647] env[63538]: _type = "Task" [ 860.051647] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.064356] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527595db-40b5-8ab2-c8b2-4f0c2137abe0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.263016] env[63538]: DEBUG nova.network.neutron [-] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.378751] env[63538]: DEBUG nova.compute.manager [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 860.418439] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101065, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576124} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.418439] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] ade3cce6-5662-4199-96f4-398436f840d8/ade3cce6-5662-4199-96f4-398436f840d8.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 860.418439] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 860.419421] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ecff67ac-2bab-4d35-9eab-223a824489c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.433574] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 860.433574] env[63538]: value = "task-5101068" [ 860.433574] env[63538]: _type = "Task" [ 860.433574] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.448199] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101068, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.515206] env[63538]: DEBUG oslo_vmware.api [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101067, 'name': PowerOnVM_Task, 'duration_secs': 0.50541} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.515721] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 860.516116] env[63538]: DEBUG nova.compute.manager [None req-93d6ad86-fc0e-4e53-a64b-e87d32febd63 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 860.517267] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1395ec8-f718-4932-be42-06bb88368466 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.531057] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d08c90-3d05-4ba4-824d-49841ffd4886 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.552618] env[63538]: DEBUG oslo_concurrency.lockutils [req-3047fd88-6740-49cf-b48f-3d5270d324a7 req-fc3a5d42-c4a1-4c97-af89-921cd95528d8 service nova] Releasing lock "refresh_cache-9c1f7da8-59f6-45bc-8d5f-23c8ec760829" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.559622] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa31623-46b6-44fc-bb21-20dc41a61d11 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.571357] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527595db-40b5-8ab2-c8b2-4f0c2137abe0, 'name': SearchDatastore_Task, 'duration_secs': 0.016185} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.574967] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance 'a2e036ae-318b-44ea-9db0-10fa3838728b' progress to 83 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 860.583254] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4765e86-aed1-4710-90ec-397603ead12c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.592104] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 860.592104] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e2b286-21fd-aefc-f4de-5a34e03fbcae" [ 860.592104] env[63538]: _type = "Task" [ 860.592104] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.603168] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e2b286-21fd-aefc-f4de-5a34e03fbcae, 'name': SearchDatastore_Task, 'duration_secs': 0.011077} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.606196] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.606497] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 9c1f7da8-59f6-45bc-8d5f-23c8ec760829/9c1f7da8-59f6-45bc-8d5f-23c8ec760829.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 860.606989] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44d55791-ab72-4c67-87df-4b01456e12cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.615925] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 860.615925] env[63538]: value = "task-5101069" [ 860.615925] env[63538]: _type = "Task" [ 860.615925] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.628926] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101069, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.766741] env[63538]: INFO nova.compute.manager [-] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Took 1.51 seconds to deallocate network for instance. 
[ 860.779271] env[63538]: DEBUG nova.network.neutron [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Successfully created port: 5487fc0a-b645-4a02-b47f-772c0eabd9d4 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 861.669744] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 861.670564] env[63538]: DEBUG oslo_concurrency.lockutils [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.681252] env[63538]: DEBUG nova.compute.manager [req-db2b16df-4d03-48ac-882d-6e8b0115e84e req-4bc25376-cf46-41c9-ada9-627b5109ded7 service nova] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Received event network-vif-deleted-9fa95b30-4f3b-4051-94dc-c49ea22b7275 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 861.681252] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d7ebc90-dd93-443e-b9ac-334a56e85756 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.697458] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101069, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.830276} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.697458] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101068, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081909} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.698099] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 9c1f7da8-59f6-45bc-8d5f-23c8ec760829/9c1f7da8-59f6-45bc-8d5f-23c8ec760829.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 861.698335] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 861.698602] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 861.698927] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 861.698927] env[63538]: value = "task-5101070" [ 861.698927] env[63538]: _type = "Task" [ 861.698927] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.699149] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c5b8b44-304a-4cdd-84c7-681d4029e81c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.706443] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59220241-cf14-4f25-a89e-7feba8c7154f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.741691] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] ade3cce6-5662-4199-96f4-398436f840d8/ade3cce6-5662-4199-96f4-398436f840d8.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 861.750140] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dadc7064-3a55-46ea-9a0d-951cd9b0836d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.765423] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101070, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.765771] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 861.765771] env[63538]: value = "task-5101071" [ 861.765771] env[63538]: _type = "Task" [ 861.765771] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.772749] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 861.772749] env[63538]: value = "task-5101072" [ 861.772749] env[63538]: _type = "Task" [ 861.772749] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.779678] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101071, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.787170] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101072, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.797698] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36429be-9d96-4e0b-9155-6d504c6e104f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.807446] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee57435-bc51-4e1b-8a06-bad59f2f7c12 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.841438] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5493eed-9d77-4c5b-af76-dba65200358d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.852213] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c58250-461a-4335-9908-3a035cea487f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.868315] env[63538]: DEBUG nova.compute.provider_tree [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.173928] env[63538]: DEBUG nova.compute.manager [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 862.222021] env[63538]: DEBUG oslo_vmware.api [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101070, 'name': PowerOnVM_Task, 'duration_secs': 0.439879} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.222021] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 862.222021] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-115a99c8-f89b-4ff6-bf88-d3dce6969927 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance 'a2e036ae-318b-44ea-9db0-10fa3838728b' progress to 100 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 862.286417] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101072, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.286783] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101071, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074053} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.287099] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 862.287955] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37cd0a61-835f-4aa8-80f9-fa6bf479c165 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.313843] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 9c1f7da8-59f6-45bc-8d5f-23c8ec760829/9c1f7da8-59f6-45bc-8d5f-23c8ec760829.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 862.314912] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df443696-cd2a-4efc-8434-542f83d8edc9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.339264] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 862.339264] env[63538]: value = "task-5101073" [ 862.339264] env[63538]: _type = "Task" [ 862.339264] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.347327] env[63538]: DEBUG nova.virt.hardware [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=<?>,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-12T12:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 862.347517] env[63538]: DEBUG nova.virt.hardware [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 862.347937] env[63538]: DEBUG nova.virt.hardware [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 862.347937] env[63538]: DEBUG nova.virt.hardware [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 862.348031] env[63538]: DEBUG nova.virt.hardware [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 862.348160] env[63538]: DEBUG nova.virt.hardware [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 862.348377] env[63538]: DEBUG nova.virt.hardware [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 862.348540] env[63538]: DEBUG nova.virt.hardware [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 862.348711] 
env[63538]: DEBUG nova.virt.hardware [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 862.348880] env[63538]: DEBUG nova.virt.hardware [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 862.349068] env[63538]: DEBUG nova.virt.hardware [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 862.349935] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d701467f-f178-425c-a1ee-6fe81074fec3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.356970] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101073, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.363680] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc4d514-5863-4243-83fb-5a4d9c44836c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.371030] env[63538]: DEBUG nova.scheduler.client.report [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 862.783827] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101072, 'name': ReconfigVM_Task, 'duration_secs': 0.720335} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.784181] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Reconfigured VM instance instance-0000003e to attach disk [datastore2] ade3cce6-5662-4199-96f4-398436f840d8/ade3cce6-5662-4199-96f4-398436f840d8.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 862.784774] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21757f3c-b943-44d1-9b2e-824fcc77e8ee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.792743] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 862.792743] env[63538]: value = "task-5101074" [ 862.792743] env[63538]: _type = "Task" [ 862.792743] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.802180] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101074, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.850776] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101073, 'name': ReconfigVM_Task, 'duration_secs': 0.315822} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.851069] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 9c1f7da8-59f6-45bc-8d5f-23c8ec760829/9c1f7da8-59f6-45bc-8d5f-23c8ec760829.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 862.851715] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c0179f6-1f2c-43e8-8307-5aab4f1df04c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.859913] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 862.859913] env[63538]: value = "task-5101075" [ 862.859913] env[63538]: _type = "Task" [ 862.859913] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.870241] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101075, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.885794] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.530s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.886526] env[63538]: DEBUG nova.compute.manager [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 862.894774] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.357s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.895187] env[63538]: DEBUG nova.objects.instance [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Lazy-loading 'resources' on Instance uuid 5bf7ed57-62d5-4abc-96d8-78b979baed92 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.128554] env[63538]: DEBUG nova.compute.manager [req-893427aa-d581-494f-8aae-0eb96bfc6cdc req-ee0f4ea9-6ba1-4933-80d7-714553bf6680 service nova] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Received event network-vif-plugged-5487fc0a-b645-4a02-b47f-772c0eabd9d4 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 863.128660] env[63538]: DEBUG oslo_concurrency.lockutils [req-893427aa-d581-494f-8aae-0eb96bfc6cdc req-ee0f4ea9-6ba1-4933-80d7-714553bf6680 service nova] Acquiring lock "376ee3d9-e8b5-4f47-9622-b873126b492e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.128871] env[63538]: DEBUG oslo_concurrency.lockutils [req-893427aa-d581-494f-8aae-0eb96bfc6cdc req-ee0f4ea9-6ba1-4933-80d7-714553bf6680 service nova] Lock "376ee3d9-e8b5-4f47-9622-b873126b492e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.129130] env[63538]: DEBUG oslo_concurrency.lockutils [req-893427aa-d581-494f-8aae-0eb96bfc6cdc req-ee0f4ea9-6ba1-4933-80d7-714553bf6680 service nova] Lock "376ee3d9-e8b5-4f47-9622-b873126b492e-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.129320] env[63538]: DEBUG nova.compute.manager [req-893427aa-d581-494f-8aae-0eb96bfc6cdc req-ee0f4ea9-6ba1-4933-80d7-714553bf6680 service nova] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] No waiting events found dispatching network-vif-plugged-5487fc0a-b645-4a02-b47f-772c0eabd9d4 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 863.129491] env[63538]: WARNING nova.compute.manager [req-893427aa-d581-494f-8aae-0eb96bfc6cdc req-ee0f4ea9-6ba1-4933-80d7-714553bf6680 service nova] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Received unexpected event network-vif-plugged-5487fc0a-b645-4a02-b47f-772c0eabd9d4 for instance with vm_state building and task_state spawning. [ 863.311080] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101074, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.374281] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101075, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.400385] env[63538]: DEBUG nova.compute.utils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 863.404397] env[63538]: DEBUG nova.compute.manager [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 863.405334] env[63538]: DEBUG nova.network.neutron [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 863.417423] env[63538]: DEBUG nova.network.neutron [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Successfully updated port: 5487fc0a-b645-4a02-b47f-772c0eabd9d4 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 863.499410] env[63538]: DEBUG nova.policy [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aed31227672c4777b9d58e323e95c82b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cd518094a084fc0be66f9a90ac2ad11', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 863.810267] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101074, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.878150] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101075, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.908031] env[63538]: DEBUG nova.compute.manager [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 863.918686] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "refresh_cache-376ee3d9-e8b5-4f47-9622-b873126b492e" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.918686] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquired lock "refresh_cache-376ee3d9-e8b5-4f47-9622-b873126b492e" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.918805] env[63538]: DEBUG nova.network.neutron [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 863.950912] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5d7b22-e862-4e74-904f-b5726eaa0ed4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.960724] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fef0a7e-269b-4f71-9d0d-d7c2009e14d4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.003215] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b3d3b5-2435-480d-bf34-1ac27a3a1fec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.013978] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58537091-838b-4406-b6b2-12ad1cddfc49 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.019816] env[63538]: DEBUG nova.network.neutron [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Successfully created port: 68ccd913-2820-44c7-b00f-73f8c61e610e {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 864.035143] env[63538]: DEBUG nova.compute.provider_tree [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.309466] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101074, 'name': Rename_Task, 'duration_secs': 1.203951} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.311035] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 864.311035] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4a0f41c-bf95-4c06-82b0-9ae4abad7b61 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.318537] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 864.318537] env[63538]: value = "task-5101076" [ 864.318537] env[63538]: _type = "Task" [ 864.318537] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.330960] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101076, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.374701] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101075, 'name': Rename_Task, 'duration_secs': 1.168063} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.375078] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 864.375372] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01c8bb89-34ca-41fc-95fb-e33c26f24c6e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.387148] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 864.387148] env[63538]: value = "task-5101077" [ 864.387148] env[63538]: _type = "Task" [ 864.387148] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.403238] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101077, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.541633] env[63538]: DEBUG nova.scheduler.client.report [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 864.549704] env[63538]: DEBUG nova.network.neutron [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 864.832964] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101076, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.848827] env[63538]: DEBUG nova.network.neutron [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Updating instance_info_cache with network_info: [{"id": "5487fc0a-b645-4a02-b47f-772c0eabd9d4", "address": "fa:16:3e:af:6a:a8", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5487fc0a-b6", "ovs_interfaceid": "5487fc0a-b645-4a02-b47f-772c0eabd9d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.907465] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101077, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.924253] env[63538]: DEBUG nova.compute.manager [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 864.995890] env[63538]: DEBUG nova.virt.hardware [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 864.995890] env[63538]: DEBUG nova.virt.hardware [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 864.995890] env[63538]: DEBUG nova.virt.hardware [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.998190] env[63538]: DEBUG nova.virt.hardware [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 864.998190] env[63538]: DEBUG nova.virt.hardware [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.998190] env[63538]: DEBUG nova.virt.hardware [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 864.998443] env[63538]: DEBUG nova.virt.hardware [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 864.998443] env[63538]: DEBUG nova.virt.hardware [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 864.998721] env[63538]: DEBUG nova.virt.hardware [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 864.998957] env[63538]: DEBUG nova.virt.hardware [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 864.999181] env[63538]: DEBUG nova.virt.hardware [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 865.000201] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcb580d-6d0c-4e90-97bb-404a8db019f6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.015077] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc8a2a4-49a1-4f7a-b5fd-c3270a2e2f0c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.048812] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.154s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.052951] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.348s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.053437] env[63538]: DEBUG nova.objects.instance [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lazy-loading 'resources' on Instance uuid 6850191a-4190-4795-ae18-830b41a76085 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 865.086497] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Acquiring lock "f1838794-710c-4bea-9e73-f6912e1b69f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.087116] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lock "f1838794-710c-4bea-9e73-f6912e1b69f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.096185] env[63538]: INFO nova.scheduler.client.report [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Deleted allocations for instance 5bf7ed57-62d5-4abc-96d8-78b979baed92 [ 865.268524] env[63538]: DEBUG nova.compute.manager [req-1531ca10-e923-4421-9368-39f13b0685d0 req-4439e472-17d1-4c08-9515-4db383e3995d service nova] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Received event network-changed-5487fc0a-b645-4a02-b47f-772c0eabd9d4 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 865.268524] env[63538]: DEBUG nova.compute.manager [req-1531ca10-e923-4421-9368-39f13b0685d0 req-4439e472-17d1-4c08-9515-4db383e3995d service nova] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Refreshing instance network info cache due to event network-changed-5487fc0a-b645-4a02-b47f-772c0eabd9d4. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 865.268524] env[63538]: DEBUG oslo_concurrency.lockutils [req-1531ca10-e923-4421-9368-39f13b0685d0 req-4439e472-17d1-4c08-9515-4db383e3995d service nova] Acquiring lock "refresh_cache-376ee3d9-e8b5-4f47-9622-b873126b492e" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.332446] env[63538]: DEBUG oslo_vmware.api [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101076, 'name': PowerOnVM_Task, 'duration_secs': 0.62366} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.334346] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 865.334346] env[63538]: INFO nova.compute.manager [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Took 11.44 seconds to spawn the instance on the hypervisor. 
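
Annotation (not part of the captured log): the Rename_Task / PowerOnVM_Task entries above are produced by oslo.vmware's task helpers, which the vmwareapi driver's vm_util code wraps (see the power_on_instance and wait_for_task paths in the entries). A minimal sketch of that invoke-and-poll pattern, with placeholder vCenter credentials and a hypothetical VM moref:

    # Minimal sketch of the oslo.vmware task pattern behind the "progress is N%" /
    # "completed successfully" lines above; the host, credentials and moref are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # hypothetical moref
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task's state/progress (the _poll_task DEBUG lines
    # above) and returns the task info, raising if the task ends in an error state.
    task_info = session.wait_for_task(task)
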
[ 865.334346] env[63538]: DEBUG nova.compute.manager [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 865.335128] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44abd53-aa8e-4709-9add-ece9ba5d0608 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.357833] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Releasing lock "refresh_cache-376ee3d9-e8b5-4f47-9622-b873126b492e" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.358583] env[63538]: DEBUG nova.compute.manager [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Instance network_info: |[{"id": "5487fc0a-b645-4a02-b47f-772c0eabd9d4", "address": "fa:16:3e:af:6a:a8", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5487fc0a-b6", "ovs_interfaceid": "5487fc0a-b645-4a02-b47f-772c0eabd9d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 865.359158] env[63538]: DEBUG oslo_concurrency.lockutils [req-1531ca10-e923-4421-9368-39f13b0685d0 req-4439e472-17d1-4c08-9515-4db383e3995d service nova] Acquired lock "refresh_cache-376ee3d9-e8b5-4f47-9622-b873126b492e" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.359995] env[63538]: DEBUG nova.network.neutron [req-1531ca10-e923-4421-9368-39f13b0685d0 req-4439e472-17d1-4c08-9515-4db383e3995d service nova] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Refreshing network info cache for port 5487fc0a-b645-4a02-b47f-772c0eabd9d4 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 865.361998] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:6a:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5487fc0a-b645-4a02-b47f-772c0eabd9d4', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.370662] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Creating folder: Project (3cd518094a084fc0be66f9a90ac2ad11). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 865.371249] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a590c2b-7456-4048-97b6-b276fdbc6a52 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.383369] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Created folder: Project (3cd518094a084fc0be66f9a90ac2ad11) in parent group-v992234. [ 865.383820] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Creating folder: Instances. Parent ref: group-v992408. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 865.384338] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-acbc16fb-6ca6-4cdf-8b5d-c6f6f3e4bcc0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.407817] env[63538]: DEBUG oslo_vmware.api [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101077, 'name': PowerOnVM_Task, 'duration_secs': 0.645933} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.409525] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Created folder: Instances in parent group-v992408. [ 865.409525] env[63538]: DEBUG oslo.service.loopingcall [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.409525] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 865.409525] env[63538]: INFO nova.compute.manager [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Took 8.84 seconds to spawn the instance on the hypervisor. 
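
Annotation (not part of the captured log): the paired 'Acquiring lock "refresh_cache-…"' / 'Acquired lock' / 'Releasing lock' entries are oslo.concurrency's lock() context manager logging around a critical section (lockutils.py:310/313/331 in the entries). A minimal sketch, with a placeholder instance UUID:

    # Minimal sketch of the named-lock pattern behind the refresh_cache-* and
    # devstack-image-cache_base lock lines; the UUID below is a placeholder.
    from oslo_concurrency import lockutils

    instance_uuid = '00000000-0000-0000-0000-000000000000'  # placeholder
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # Only one greenthread in this process holds the named lock at a time,
        # so the cached network info cannot be read and rewritten concurrently.
        pass
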
[ 865.409525] env[63538]: DEBUG nova.compute.manager [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 865.409525] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 865.410275] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31bc1dc-6ffd-413c-8a10-d04537a32966 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.413018] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ddfcfa0-7c7c-4ce4-a47f-13aae31f8059 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.439638] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.439638] env[63538]: value = "task-5101080" [ 865.439638] env[63538]: _type = "Task" [ 865.439638] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.448916] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101080, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.549057] env[63538]: DEBUG nova.network.neutron [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Port 11d7dbc5-d269-456b-9a7a-601759e64b51 binding to destination host cpu-1 is already ACTIVE {{(pid=63538) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 865.549406] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.549598] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.549771] env[63538]: DEBUG nova.network.neutron [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 865.609241] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5db4f8b8-4680-4918-90ac-fe92dba2d017 tempest-ServersNegativeTestMultiTenantJSON-969202362 tempest-ServersNegativeTestMultiTenantJSON-969202362-project-member] Lock "5bf7ed57-62d5-4abc-96d8-78b979baed92" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.848s {{(pid=63538) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.864379] env[63538]: INFO nova.compute.manager [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Took 61.97 seconds to build instance. [ 865.961032] env[63538]: INFO nova.compute.manager [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Took 59.81 seconds to build instance. [ 865.982667] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101080, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.099016] env[63538]: DEBUG nova.compute.manager [req-8bacc9d6-fcb7-4af8-b1ab-86e6ee27fe2c req-64f62ad2-2c9e-443d-a299-148669ad4a70 service nova] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Received event network-vif-plugged-68ccd913-2820-44c7-b00f-73f8c61e610e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 866.100532] env[63538]: DEBUG oslo_concurrency.lockutils [req-8bacc9d6-fcb7-4af8-b1ab-86e6ee27fe2c req-64f62ad2-2c9e-443d-a299-148669ad4a70 service nova] Acquiring lock "5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.100532] env[63538]: DEBUG oslo_concurrency.lockutils [req-8bacc9d6-fcb7-4af8-b1ab-86e6ee27fe2c req-64f62ad2-2c9e-443d-a299-148669ad4a70 service nova] Lock "5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.100532] env[63538]: DEBUG oslo_concurrency.lockutils [req-8bacc9d6-fcb7-4af8-b1ab-86e6ee27fe2c req-64f62ad2-2c9e-443d-a299-148669ad4a70 service nova] Lock "5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.100532] env[63538]: DEBUG nova.compute.manager [req-8bacc9d6-fcb7-4af8-b1ab-86e6ee27fe2c req-64f62ad2-2c9e-443d-a299-148669ad4a70 service nova] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] No waiting events found dispatching network-vif-plugged-68ccd913-2820-44c7-b00f-73f8c61e610e {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 866.100532] env[63538]: WARNING nova.compute.manager [req-8bacc9d6-fcb7-4af8-b1ab-86e6ee27fe2c req-64f62ad2-2c9e-443d-a299-148669ad4a70 service nova] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Received unexpected event network-vif-plugged-68ccd913-2820-44c7-b00f-73f8c61e610e for instance with vm_state building and task_state spawning. 
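
Annotation (not part of the captured log): the 'Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker…" :: waited …s' entries come from oslo.concurrency's synchronized decorator (the inner wrapper at lockutils.py:402/407/421 in the entries), which logs the decorated callable plus wait and hold times. A minimal illustrative sketch, not Nova's resource tracker itself:

    # Minimal sketch of the decorator pattern behind the compute_resources lock lines.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources_example():
        # Serialized section: concurrent claims and usage updates queue here,
        # which is why the entries show waits like "waited 47.357s" when many
        # builds and deletes land on the host at once.
        pass

    claim_resources_example()
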
[ 866.127360] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623701fd-f147-4187-80ab-09b66b33363c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.137119] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b2a2d6-15cd-4ff1-b7cd-e2bfbf95e779 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.182296] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa94bb5-1467-4422-85a9-e4001686576d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.191194] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c500b07-e2be-41d4-b475-1f4213bde8cb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.207165] env[63538]: DEBUG nova.compute.provider_tree [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.366138] env[63538]: DEBUG oslo_concurrency.lockutils [None req-697f5086-c6f5-4fa7-8b0c-61846de7cf5a tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "ade3cce6-5662-4199-96f4-398436f840d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.671s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.396536] env[63538]: DEBUG nova.network.neutron [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance_info_cache with network_info: [{"id": "11d7dbc5-d269-456b-9a7a-601759e64b51", "address": "fa:16:3e:45:35:dd", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.81", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11d7dbc5-d2", "ovs_interfaceid": "11d7dbc5-d269-456b-9a7a-601759e64b51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.467393] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101080, 'name': CreateVM_Task, 
'duration_secs': 0.716441} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.468295] env[63538]: DEBUG nova.network.neutron [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Successfully updated port: 68ccd913-2820-44c7-b00f-73f8c61e610e {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 866.469429] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 866.471046] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.471179] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.471506] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 866.471909] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54d3e85f-36f5-4110-96fa-f6bd73fc39b7 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "9c1f7da8-59f6-45bc-8d5f-23c8ec760829" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.656s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.472343] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93d2cfe1-50f8-4f95-aecd-a6216f45fbed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.482378] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 866.482378] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c2d960-95a1-0bef-00c7-3f3405bf007c" [ 866.482378] env[63538]: _type = "Task" [ 866.482378] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.494720] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c2d960-95a1-0bef-00c7-3f3405bf007c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.613769] env[63538]: DEBUG nova.network.neutron [req-1531ca10-e923-4421-9368-39f13b0685d0 req-4439e472-17d1-4c08-9515-4db383e3995d service nova] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Updated VIF entry in instance network info cache for port 5487fc0a-b645-4a02-b47f-772c0eabd9d4. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 866.614272] env[63538]: DEBUG nova.network.neutron [req-1531ca10-e923-4421-9368-39f13b0685d0 req-4439e472-17d1-4c08-9515-4db383e3995d service nova] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Updating instance_info_cache with network_info: [{"id": "5487fc0a-b645-4a02-b47f-772c0eabd9d4", "address": "fa:16:3e:af:6a:a8", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5487fc0a-b6", "ovs_interfaceid": "5487fc0a-b645-4a02-b47f-772c0eabd9d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.714373] env[63538]: DEBUG nova.scheduler.client.report [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 866.757848] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquiring lock "e447c109-4cef-4cc7-9acf-61abc0f47482" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.758126] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Lock "e447c109-4cef-4cc7-9acf-61abc0f47482" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.826067] env[63538]: DEBUG nova.compute.manager [req-5ed54d3e-060f-4264-a480-78fe0d26c41a req-5754ec9f-6eef-4961-9364-5fd9726886f2 service nova] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Received event network-changed-68ccd913-2820-44c7-b00f-73f8c61e610e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 866.826250] env[63538]: DEBUG nova.compute.manager [req-5ed54d3e-060f-4264-a480-78fe0d26c41a req-5754ec9f-6eef-4961-9364-5fd9726886f2 service nova] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Refreshing instance network info cache due to event network-changed-68ccd913-2820-44c7-b00f-73f8c61e610e. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 866.826786] env[63538]: DEBUG oslo_concurrency.lockutils [req-5ed54d3e-060f-4264-a480-78fe0d26c41a req-5754ec9f-6eef-4961-9364-5fd9726886f2 service nova] Acquiring lock "refresh_cache-5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.827011] env[63538]: DEBUG oslo_concurrency.lockutils [req-5ed54d3e-060f-4264-a480-78fe0d26c41a req-5754ec9f-6eef-4961-9364-5fd9726886f2 service nova] Acquired lock "refresh_cache-5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.827148] env[63538]: DEBUG nova.network.neutron [req-5ed54d3e-060f-4264-a480-78fe0d26c41a req-5754ec9f-6eef-4961-9364-5fd9726886f2 service nova] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Refreshing network info cache for port 68ccd913-2820-44c7-b00f-73f8c61e610e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 866.869138] env[63538]: DEBUG nova.compute.manager [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 866.899953] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.970845] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "refresh_cache-5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.980021] env[63538]: DEBUG nova.compute.manager [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 866.998098] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c2d960-95a1-0bef-00c7-3f3405bf007c, 'name': SearchDatastore_Task, 'duration_secs': 0.028725} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.998502] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.998753] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.998999] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.999172] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.999402] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 
tempest-ListImageFiltersTestJSON-316064239-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.999880] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-790c2f02-ca2f-49cc-a198-7e79b138f832 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.017763] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.017763] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 867.017763] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33934fc1-70f0-4317-8ffe-0ab7dbac55ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.023596] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 867.023596] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529a4a26-c3da-2920-36e1-bea7bc5768cc" [ 867.023596] env[63538]: _type = "Task" [ 867.023596] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.037070] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529a4a26-c3da-2920-36e1-bea7bc5768cc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.120309] env[63538]: DEBUG oslo_concurrency.lockutils [req-1531ca10-e923-4421-9368-39f13b0685d0 req-4439e472-17d1-4c08-9515-4db383e3995d service nova] Releasing lock "refresh_cache-376ee3d9-e8b5-4f47-9622-b873126b492e" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.221128] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.168s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.226691] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.999s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.227106] env[63538]: DEBUG nova.objects.instance [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lazy-loading 'resources' on Instance uuid 79f4cdd9-219a-4440-9dd2-9b2a360965b1 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 867.264840] env[63538]: INFO nova.scheduler.client.report [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Deleted allocations for instance 6850191a-4190-4795-ae18-830b41a76085 [ 867.370909] env[63538]: DEBUG nova.network.neutron [req-5ed54d3e-060f-4264-a480-78fe0d26c41a req-5754ec9f-6eef-4961-9364-5fd9726886f2 service nova] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 867.394427] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.405125] env[63538]: DEBUG nova.compute.manager [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63538) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 867.405372] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.468225] env[63538]: DEBUG nova.network.neutron [req-5ed54d3e-060f-4264-a480-78fe0d26c41a req-5754ec9f-6eef-4961-9364-5fd9726886f2 service nova] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.510563] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.540021] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529a4a26-c3da-2920-36e1-bea7bc5768cc, 'name': SearchDatastore_Task, 'duration_secs': 0.014354} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.540021] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbdb8bc1-c9d0-478d-9465-2fc0190c0257 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.547107] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 867.547107] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5228e8fe-5308-d20a-3ff6-d16d5e619769" [ 867.547107] env[63538]: _type = "Task" [ 867.547107] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.561153] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5228e8fe-5308-d20a-3ff6-d16d5e619769, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.567594] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecceb2c-817f-457c-92b6-ad7eb1a17d00 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.575244] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cc1b65-6e23-457d-b681-225ff2a16d0c tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Suspending the VM {{(pid=63538) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 867.575244] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-ac22d968-58af-441e-9bbd-565134bc3dc2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.585059] env[63538]: DEBUG oslo_vmware.api [None req-b0cc1b65-6e23-457d-b681-225ff2a16d0c tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 867.585059] env[63538]: value = "task-5101081" [ 867.585059] env[63538]: _type = "Task" [ 867.585059] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.593144] env[63538]: DEBUG oslo_vmware.api [None req-b0cc1b65-6e23-457d-b681-225ff2a16d0c tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101081, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.784589] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3e6b1f1f-0f2d-4f43-9e88-ec2b4c46456c tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "6850191a-4190-4795-ae18-830b41a76085" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 53.726s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.974024] env[63538]: DEBUG oslo_concurrency.lockutils [req-5ed54d3e-060f-4264-a480-78fe0d26c41a req-5754ec9f-6eef-4961-9364-5fd9726886f2 service nova] Releasing lock "refresh_cache-5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.974024] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquired lock "refresh_cache-5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.974024] env[63538]: DEBUG nova.network.neutron [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 868.059720] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5228e8fe-5308-d20a-3ff6-d16d5e619769, 'name': SearchDatastore_Task, 'duration_secs': 0.012025} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.062674] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.063017] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 376ee3d9-e8b5-4f47-9622-b873126b492e/376ee3d9-e8b5-4f47-9622-b873126b492e.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 868.063807] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fdc515f1-3c23-42a4-8659-2cd6c9d00c14 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.075182] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 868.075182] env[63538]: value = "task-5101082" [ 868.075182] env[63538]: _type = "Task" [ 868.075182] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.089198] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101082, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.104918] env[63538]: DEBUG oslo_vmware.api [None req-b0cc1b65-6e23-457d-b681-225ff2a16d0c tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101081, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.244312] env[63538]: INFO nova.compute.manager [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Rebuilding instance [ 868.280423] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1864c670-6a5d-4619-9c8c-78d11573e09a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.301688] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ab52cd-44e8-4a39-897d-634e90092282 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.367767] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acccaf9a-def0-4abd-b03f-afeffb3e8f75 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.379614] env[63538]: DEBUG nova.compute.manager [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 868.381229] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be986dca-7ea6-4c6b-a2ad-8072c53d56a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.394732] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3257b405-1834-43ef-8a6c-b2dce6d32347 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.430272] env[63538]: DEBUG nova.compute.provider_tree [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 868.526187] env[63538]: DEBUG nova.network.neutron [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 868.585762] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101082, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.601838] env[63538]: DEBUG oslo_vmware.api [None req-b0cc1b65-6e23-457d-b681-225ff2a16d0c tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101081, 'name': SuspendVM_Task, 'duration_secs': 0.743185} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.602336] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b0cc1b65-6e23-457d-b681-225ff2a16d0c tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Suspended the VM {{(pid=63538) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 868.602439] env[63538]: DEBUG nova.compute.manager [None req-b0cc1b65-6e23-457d-b681-225ff2a16d0c tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 868.603301] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28774447-facd-4eaf-b046-603e2c1f1dfb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.837770] env[63538]: DEBUG nova.network.neutron [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Updating instance_info_cache with network_info: [{"id": "68ccd913-2820-44c7-b00f-73f8c61e610e", "address": "fa:16:3e:47:59:9c", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ccd913-28", "ovs_interfaceid": "68ccd913-2820-44c7-b00f-73f8c61e610e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.911594] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 868.911957] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c988d4b-196e-4454-925f-a6802efd5d9e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.919807] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 868.919807] env[63538]: value = "task-5101083" [ 868.919807] env[63538]: _type = "Task" [ 868.919807] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.929936] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101083, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.932159] env[63538]: DEBUG nova.scheduler.client.report [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 869.087263] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101082, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602697} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.087567] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 376ee3d9-e8b5-4f47-9622-b873126b492e/376ee3d9-e8b5-4f47-9622-b873126b492e.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 869.087778] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 869.088062] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-543c3686-f64c-49e6-8620-6a1006a7ce8d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.097547] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 869.097547] env[63538]: value = "task-5101084" [ 869.097547] env[63538]: _type = "Task" [ 869.097547] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.108571] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101084, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.340690] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Releasing lock "refresh_cache-5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.341658] env[63538]: DEBUG nova.compute.manager [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Instance network_info: |[{"id": "68ccd913-2820-44c7-b00f-73f8c61e610e", "address": "fa:16:3e:47:59:9c", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ccd913-28", "ovs_interfaceid": "68ccd913-2820-44c7-b00f-73f8c61e610e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 869.342119] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:59:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68ccd913-2820-44c7-b00f-73f8c61e610e', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 869.350585] env[63538]: DEBUG oslo.service.loopingcall [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 869.350964] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 869.351322] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ae396be-bb4c-4905-81de-be88c2abd985 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.376263] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 869.376263] env[63538]: value = "task-5101085" [ 869.376263] env[63538]: _type = "Task" [ 869.376263] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.386217] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101085, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.430595] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101083, 'name': PowerOffVM_Task, 'duration_secs': 0.227093} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.430909] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 869.431214] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 869.432065] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-672c122e-1fda-4258-842d-c69a21974131 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.438816] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.212s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.443532] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.385s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.445323] env[63538]: INFO nova.compute.claims [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] 
[instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 869.448131] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 869.448632] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1fdae10c-53bf-435f-aa22-96ea0916457f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.474984] env[63538]: INFO nova.scheduler.client.report [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted allocations for instance 79f4cdd9-219a-4440-9dd2-9b2a360965b1 [ 869.519530] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 869.519766] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 869.519951] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleting the datastore file [datastore2] 9c1f7da8-59f6-45bc-8d5f-23c8ec760829 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 869.520539] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5a09b3e-f579-4312-95ce-a77c851f534b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.529709] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 869.529709] env[63538]: value = "task-5101087" [ 869.529709] env[63538]: _type = "Task" [ 869.529709] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.547059] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101087, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.607385] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101084, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085095} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.607685] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.608842] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e110fb-7382-4229-862e-fe8e59a078d5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.633699] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 376ee3d9-e8b5-4f47-9622-b873126b492e/376ee3d9-e8b5-4f47-9622-b873126b492e.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.634072] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-467b86f4-bfef-464d-a892-c3d04533b3cb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.658209] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 869.658209] env[63538]: value = "task-5101088" [ 869.658209] env[63538]: _type = "Task" [ 869.658209] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.668063] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101088, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.891023] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101085, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.985182] env[63538]: DEBUG oslo_concurrency.lockutils [None req-32eb103f-85e6-44cb-9eaf-53fad514778a tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "79f4cdd9-219a-4440-9dd2-9b2a360965b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 54.809s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.040249] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.414013} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.040714] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 870.040714] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 870.041837] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 870.172537] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101088, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.388691] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101085, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.675924] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101088, 'name': ReconfigVM_Task, 'duration_secs': 0.518204} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.679522] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 376ee3d9-e8b5-4f47-9622-b873126b492e/376ee3d9-e8b5-4f47-9622-b873126b492e.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.682502] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ca92561b-052c-4b28-a12b-64f97129c7b3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.689291] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 870.689291] env[63538]: value = "task-5101089" [ 870.689291] env[63538]: _type = "Task" [ 870.689291] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.701785] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101089, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.891026] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101085, 'name': CreateVM_Task, 'duration_secs': 1.155818} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.891402] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 870.892203] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.892411] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.893018] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 870.893514] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e2eb9c7-fe84-46ee-831b-a649e3cfcd20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.899761] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 870.899761] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5224cebd-960d-71d6-fbc4-7590a4f3e138" [ 870.899761] env[63538]: _type = "Task" [ 870.899761] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.917606] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5224cebd-960d-71d6-fbc4-7590a4f3e138, 'name': SearchDatastore_Task, 'duration_secs': 0.010658} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.920958] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.921305] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 870.921580] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.921745] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.921970] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 870.922694] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1374fb30-17fd-40d0-a10d-ce7eb94bd57d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.934000] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 870.934298] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 870.935188] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6b44aaf-293b-40c1-91d3-b07e5a5b370e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.949074] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 870.949074] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522ed5cf-d49d-bd10-e31a-df67ddea9947" [ 870.949074] env[63538]: _type = "Task" [ 870.949074] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.960998] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522ed5cf-d49d-bd10-e31a-df67ddea9947, 'name': SearchDatastore_Task, 'duration_secs': 0.010819} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.962654] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fece5b66-4b53-4b29-aeb1-676e27a45eae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.968239] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea7b795-7211-433c-9969-9e21f72ae26f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.975558] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 870.975558] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b32c9b-4b7d-bec9-7ca7-76d94ae315bb" [ 870.975558] env[63538]: _type = "Task" [ 870.975558] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.981922] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7cac24-4ad4-41fc-b02f-dc4e5e25b820 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.992056] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b32c9b-4b7d-bec9-7ca7-76d94ae315bb, 'name': SearchDatastore_Task, 'duration_secs': 0.01385} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.019453] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.019746] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a/5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 871.020287] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-954fd5be-6be9-47fd-a5e8-31564c2b2a48 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.023289] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903e64ba-c28c-4156-b59d-221ce472aa93 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.034551] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad904985-1bc1-4367-85d4-cc692ec0c24e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.040022] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 871.040022] env[63538]: value = "task-5101090" [ 871.040022] env[63538]: _type = "Task" [ 871.040022] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.056952] env[63538]: DEBUG nova.compute.provider_tree [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 871.064926] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101090, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.091338] env[63538]: DEBUG nova.virt.hardware [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=<?>,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-12T12:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 871.091482] env[63538]: DEBUG nova.virt.hardware [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 871.091626] env[63538]: DEBUG nova.virt.hardware [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 871.092426] env[63538]: DEBUG nova.virt.hardware [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 871.092426] env[63538]: DEBUG nova.virt.hardware [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 871.092426] env[63538]: DEBUG nova.virt.hardware [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 871.092734] env[63538]: DEBUG nova.virt.hardware [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 871.092734] env[63538]: DEBUG nova.virt.hardware [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
871.092876] env[63538]: DEBUG nova.virt.hardware [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 871.093347] env[63538]: DEBUG nova.virt.hardware [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 871.093555] env[63538]: DEBUG nova.virt.hardware [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 871.095270] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5e3676-5a98-4fd3-b48e-b963699e0480 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.105425] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be41b7d4-7bf5-44b3-af08-cdc862feb066 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.121750] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:90:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2906e927-8bc7-4651-b391-e6a376f3208b', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 871.132174] env[63538]: DEBUG oslo.service.loopingcall [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.135020] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 871.135449] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2be6a4c-0be3-4982-9077-c458b6c2291c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.159214] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 871.159214] env[63538]: value = "task-5101091" [ 871.159214] env[63538]: _type = "Task" [ 871.159214] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.168749] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101091, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.204867] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101089, 'name': Rename_Task, 'duration_secs': 0.285482} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.205660] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 871.205946] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6769dc21-359a-49bc-ad3b-1ffa23998fde {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.215297] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 871.215297] env[63538]: value = "task-5101092" [ 871.215297] env[63538]: _type = "Task" [ 871.215297] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.226271] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101092, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.551024] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101090, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509927} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.551325] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a/5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 871.551674] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 871.551968] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e291a826-35bf-4a21-82ea-fb02faab0295 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.562529] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 871.562529] env[63538]: value = "task-5101093" [ 871.562529] env[63538]: _type = "Task" [ 871.562529] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.575461] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101093, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.587292] env[63538]: ERROR nova.scheduler.client.report [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [req-a881cfaa-0461-49a7-9e11-c25a22bc4624] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a881cfaa-0461-49a7-9e11-c25a22bc4624"}]} [ 871.609891] env[63538]: DEBUG nova.scheduler.client.report [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 871.630294] env[63538]: DEBUG nova.scheduler.client.report [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 871.630493] env[63538]: DEBUG nova.compute.provider_tree [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 871.638507] env[63538]: DEBUG nova.compute.manager [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 871.639534] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6385bc22-27ca-4b32-9538-586446cb5eae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.650984] env[63538]: DEBUG nova.scheduler.client.report [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 871.673630] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101091, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.675328] env[63538]: DEBUG nova.scheduler.client.report [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 871.728611] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101092, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.846267] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "b47925eb-3d97-415b-9410-2e325da5ce79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.846527] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "b47925eb-3d97-415b-9410-2e325da5ce79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.073915] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101093, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072957} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.074239] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 872.075251] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13dd5fea-6eb7-457d-9d79-987d9f5b0693 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.101698] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a/5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 872.102301] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07f5aa24-b441-4946-8424-510d9cb2e310 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.128873] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 872.128873] env[63538]: value = "task-5101094" [ 872.128873] env[63538]: _type = "Task" [ 872.128873] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.144230] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101094, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.155655] env[63538]: INFO nova.compute.manager [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] instance snapshotting [ 872.155895] env[63538]: WARNING nova.compute.manager [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 872.160994] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6101bfde-4530-490c-a9a0-239190f1a790 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.188997] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101091, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.192533] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2acf2c13-03a6-40ec-bf2f-1a718c10d847 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.200434] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436b287d-d621-449d-9410-4d6d7b03d28f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.213777] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "f5d92749-04d6-4935-8dc6-afb692222df0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.214157] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "f5d92749-04d6-4935-8dc6-afb692222df0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.217696] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83324b08-e8ae-40db-b62c-24a92a7a4ffe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.231793] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101092, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.262979] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1769369a-03d4-4cd5-bf9e-f0a272134623 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.272642] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b94f412-1f4e-44d3-ae15-c0c54fc6fef7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.289281] env[63538]: DEBUG nova.compute.provider_tree [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 872.642841] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101094, 'name': ReconfigVM_Task, 'duration_secs': 0.348084} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.643301] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a/5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.644184] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79a786da-a1a2-476f-87fe-f8dce2cad6cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.652350] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 872.652350] env[63538]: value = "task-5101095" [ 872.652350] env[63538]: _type = "Task" [ 872.652350] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.664152] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101095, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.676155] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101091, 'name': CreateVM_Task, 'duration_secs': 1.128997} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.676423] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 872.677438] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.677780] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.678276] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 872.678656] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8e4e595-a9af-4901-8b43-f985a55c300d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.686016] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 872.686016] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a6c7d2-a35f-56ac-e03e-6fcae4b0c19a" [ 872.686016] env[63538]: _type = "Task" [ 872.686016] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.694811] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a6c7d2-a35f-56ac-e03e-6fcae4b0c19a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.723462] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 872.727617] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-190b1b71-92cb-47ff-802f-19651c421dde {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.735888] env[63538]: DEBUG oslo_vmware.api [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101092, 'name': PowerOnVM_Task, 'duration_secs': 1.272352} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.737316] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 872.737542] env[63538]: INFO nova.compute.manager [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Took 10.56 seconds to spawn the instance on the hypervisor. [ 872.737758] env[63538]: DEBUG nova.compute.manager [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 872.738089] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 872.738089] env[63538]: value = "task-5101096" [ 872.738089] env[63538]: _type = "Task" [ 872.738089] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.738805] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494f3f74-9512-4b9d-b956-0655bf5f526a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.809638] env[63538]: ERROR nova.scheduler.client.report [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [req-97d16a65-e2e0-4af6-9be2-1c4c513726cd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-97d16a65-e2e0-4af6-9be2-1c4c513726cd"}]} [ 872.826998] env[63538]: DEBUG nova.scheduler.client.report [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 872.841030] env[63538]: DEBUG nova.scheduler.client.report [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 872.841695] env[63538]: DEBUG nova.compute.provider_tree [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 872.852843] env[63538]: DEBUG nova.scheduler.client.report [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, 
aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 872.872963] env[63538]: DEBUG nova.scheduler.client.report [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 873.164463] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101095, 'name': Rename_Task, 'duration_secs': 0.320289} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.164799] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 873.167303] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a45f0740-4ff6-4ecd-82d7-b17d1cb6ec71 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.173464] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 873.173464] env[63538]: value = "task-5101097" [ 873.173464] env[63538]: _type = "Task" [ 873.173464] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.181488] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101097, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.196426] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a6c7d2-a35f-56ac-e03e-6fcae4b0c19a, 'name': SearchDatastore_Task, 'duration_secs': 0.048059} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.199104] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.199363] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 873.199601] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.199756] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.199950] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 873.200421] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9be943f-b7a4-4196-ab71-c2c2595f6f98 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.220211] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 873.220211] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 873.220538] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1455b8e-6dd0-48c3-a811-ee1e2b4470a7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.228245] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 873.228245] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525d5fd3-c375-2124-13d4-b791cb0cc1fd" [ 873.228245] env[63538]: _type = "Task" [ 873.228245] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.236937] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525d5fd3-c375-2124-13d4-b791cb0cc1fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.250984] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101096, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.262015] env[63538]: INFO nova.compute.manager [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Took 62.26 seconds to build instance. 
[ 873.301640] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b716ea5-1c7e-406a-9608-fec6996d9f75 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.310078] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6ff669-65b0-447d-96e5-f220d3ae8675 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.342648] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8737de9-3617-4fd5-a7e0-f9ce5507e4fe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.352446] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2aff2c-447b-4e00-96c1-3dd7fef82b08 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.369197] env[63538]: DEBUG nova.compute.provider_tree [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 873.684995] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101097, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.738394] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525d5fd3-c375-2124-13d4-b791cb0cc1fd, 'name': SearchDatastore_Task, 'duration_secs': 0.011841} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.739259] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30a49baa-78c2-44cc-9cc2-25462fc77cb1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.747216] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 873.747216] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522073d2-86c6-668c-e978-329ef3e5a86f" [ 873.747216] env[63538]: _type = "Task" [ 873.747216] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.754121] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101096, 'name': CreateSnapshot_Task, 'duration_secs': 0.868039} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.754729] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 873.755482] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52ce82c-ba38-4bab-b913-4bce1be6b6fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.761124] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522073d2-86c6-668c-e978-329ef3e5a86f, 'name': SearchDatastore_Task, 'duration_secs': 0.012707} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.761652] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.761932] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 9c1f7da8-59f6-45bc-8d5f-23c8ec760829/9c1f7da8-59f6-45bc-8d5f-23c8ec760829.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 873.762186] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43a0493b-08ec-44d8-a671-5a9bef0086db {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.767242] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f831984-e315-460d-b7f5-c4963c2e21bc tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "376ee3d9-e8b5-4f47-9622-b873126b492e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.948s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.778230] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: 
(returnval){ [ 873.778230] env[63538]: value = "task-5101098" [ 873.778230] env[63538]: _type = "Task" [ 873.778230] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.787156] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101098, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.904191] env[63538]: DEBUG nova.scheduler.client.report [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 93 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 873.904761] env[63538]: DEBUG nova.compute.provider_tree [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 93 to 94 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 873.904907] env[63538]: DEBUG nova.compute.provider_tree [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 874.185892] env[63538]: DEBUG oslo_vmware.api [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101097, 'name': PowerOnVM_Task, 'duration_secs': 0.658906} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.186211] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 874.187018] env[63538]: INFO nova.compute.manager [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Took 9.26 seconds to spawn the instance on the hypervisor. [ 874.187018] env[63538]: DEBUG nova.compute.manager [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 874.187417] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecafb8bc-e649-47ef-ba74-d6d8f3d950ec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.272268] env[63538]: DEBUG nova.compute.manager [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 874.282811] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 874.284863] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7d88d00e-411a-4bfe-878d-8bd67b7d4b56 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.301184] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101098, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.303586] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 874.303586] env[63538]: value = "task-5101099" [ 874.303586] env[63538]: _type = "Task" [ 874.303586] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.314167] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101099, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.411795] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.968s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.412633] env[63538]: DEBUG nova.compute.manager [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 874.416843] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 50.563s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.419642] env[63538]: INFO nova.compute.claims [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 874.706307] env[63538]: INFO nova.compute.manager [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Took 59.64 seconds to build instance. [ 874.806184] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101098, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.658022} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.812433] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 9c1f7da8-59f6-45bc-8d5f-23c8ec760829/9c1f7da8-59f6-45bc-8d5f-23c8ec760829.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 874.813079] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 874.814422] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.814922] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4a1a2f7-973f-4bcb-ae71-d80bea9631f4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.825641] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101099, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.827831] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 874.827831] env[63538]: value = "task-5101100" [ 874.827831] env[63538]: _type = "Task" [ 874.827831] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.840795] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101100, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.926449] env[63538]: DEBUG nova.compute.utils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 874.928150] env[63538]: DEBUG nova.compute.manager [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 874.928360] env[63538]: DEBUG nova.network.neutron [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 875.012874] env[63538]: DEBUG nova.policy [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a11495c611974f26aaa6117bfda80179', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3f6e933bf6c4e71af3b2a1e02d6e42f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 875.208548] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30bcae11-6d41-4c73-91b7-04490cf61f93 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.056s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.319730] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101099, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.346484] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101100, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083005} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.346949] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 875.347736] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcab6789-ddb9-41fe-99d0-0a5539180af4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.379480] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 9c1f7da8-59f6-45bc-8d5f-23c8ec760829/9c1f7da8-59f6-45bc-8d5f-23c8ec760829.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.380059] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1db7d522-b01b-4c0b-8d57-cddaeba07a71 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.397663] env[63538]: DEBUG nova.network.neutron [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Successfully created port: 2e8868e8-3746-43e8-906c-20e0cd0e7336 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 875.408146] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 875.408146] env[63538]: value = "task-5101101" [ 875.408146] env[63538]: _type = "Task" [ 875.408146] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.417512] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101101, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.434522] env[63538]: DEBUG nova.compute.manager [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 875.711555] env[63538]: DEBUG nova.compute.manager [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 875.821038] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101099, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.925564] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101101, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.945666] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06c01e8-f3f5-4d43-8a26-cd0e220d047b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.957564] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3460fe-7f7d-462a-b5bc-9159dca531fc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.001479] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fcd8385-f1cc-44d3-8904-1aa61ae66d87 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.011730] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585dbc78-5a3b-404d-90fb-48d68a2c8e73 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.027282] env[63538]: DEBUG nova.compute.provider_tree [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.239692] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.319135] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101099, 'name': CloneVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.423453] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101101, 'name': ReconfigVM_Task, 'duration_secs': 0.839439} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.423748] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 9c1f7da8-59f6-45bc-8d5f-23c8ec760829/9c1f7da8-59f6-45bc-8d5f-23c8ec760829.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.424416] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-085f626d-566c-4ca9-a554-4a52bc979dcf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.431619] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 876.431619] env[63538]: value = "task-5101102" [ 876.431619] env[63538]: _type = "Task" [ 876.431619] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.440728] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101102, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.449130] env[63538]: DEBUG nova.compute.manager [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 876.476379] env[63538]: DEBUG nova.virt.hardware [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 876.476726] env[63538]: DEBUG nova.virt.hardware [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 876.476916] env[63538]: DEBUG nova.virt.hardware [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 876.477138] env[63538]: DEBUG nova.virt.hardware [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 876.477323] env[63538]: DEBUG nova.virt.hardware [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 876.477496] env[63538]: DEBUG nova.virt.hardware [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 876.477762] env[63538]: DEBUG nova.virt.hardware [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 876.477973] env[63538]: DEBUG nova.virt.hardware [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 876.478212] env[63538]: DEBUG nova.virt.hardware [None 
req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 876.478398] env[63538]: DEBUG nova.virt.hardware [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 876.478620] env[63538]: DEBUG nova.virt.hardware [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 876.479595] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8f74f1-4ce5-4876-b27d-511437b32eb6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.488804] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e046bc7c-0801-4b60-b2e2-3fe6e5473bce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.530069] env[63538]: DEBUG nova.scheduler.client.report [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 876.595035] env[63538]: DEBUG nova.compute.manager [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 876.595342] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a273cd-fc59-48bd-8272-7e3bd4af5775 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.818501] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101099, 'name': CloneVM_Task, 'duration_secs': 2.083565} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.818820] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Created linked-clone VM from snapshot [ 876.819646] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827d8974-743c-4eff-a1e1-0f0254c9c257 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.828831] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Uploading image b37dc44b-ac20-4e6d-abee-252421a4544c {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 876.852317] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 876.852317] env[63538]: value = "vm-992414" [ 876.852317] env[63538]: _type = "VirtualMachine" [ 876.852317] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 876.852653] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-63565fff-9583-4972-bc52-5f9dd0ddbe1e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.861976] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lease: (returnval){ [ 876.861976] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52effb38-d145-77ba-3295-ac5263a99678" [ 876.861976] env[63538]: _type = "HttpNfcLease" [ 876.861976] env[63538]: } obtained for exporting VM: (result){ [ 876.861976] env[63538]: value = "vm-992414" [ 876.861976] env[63538]: _type = "VirtualMachine" [ 876.861976] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 876.862356] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the lease: (returnval){ [ 876.862356] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52effb38-d145-77ba-3295-ac5263a99678" [ 876.862356] env[63538]: _type = "HttpNfcLease" [ 876.862356] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 876.869358] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 876.869358] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52effb38-d145-77ba-3295-ac5263a99678" [ 876.869358] env[63538]: _type = "HttpNfcLease" [ 876.869358] env[63538]: } is initializing. 
{{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 876.943583] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101102, 'name': Rename_Task, 'duration_secs': 0.34411} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.943583] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 876.943583] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c91e088d-d98b-42cd-9797-b625e22f5f56 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.950311] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 876.950311] env[63538]: value = "task-5101104" [ 876.950311] env[63538]: _type = "Task" [ 876.950311] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.958951] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101104, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.036015] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.036583] env[63538]: DEBUG nova.compute.manager [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 877.039507] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 46.211s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.107926] env[63538]: INFO nova.compute.manager [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] instance snapshotting [ 877.113904] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d1d86b-dfcd-4dbc-87a3-6e2b6281ec49 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.136276] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1616dd-7a4d-4405-8b69-0b03dfe1c7b4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.156754] env[63538]: DEBUG nova.compute.manager [req-ba64c358-dcef-4552-b87b-fd72cbb6cf10 req-7a0d0e8f-70e6-4db9-8844-252a870ce58d service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Received event network-vif-plugged-2e8868e8-3746-43e8-906c-20e0cd0e7336 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 877.157014] env[63538]: DEBUG oslo_concurrency.lockutils [req-ba64c358-dcef-4552-b87b-fd72cbb6cf10 req-7a0d0e8f-70e6-4db9-8844-252a870ce58d service nova] Acquiring lock "e79a9eeb-a4c4-4613-bc43-4e40103addf9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.157237] env[63538]: DEBUG oslo_concurrency.lockutils [req-ba64c358-dcef-4552-b87b-fd72cbb6cf10 req-7a0d0e8f-70e6-4db9-8844-252a870ce58d service nova] Lock "e79a9eeb-a4c4-4613-bc43-4e40103addf9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.157411] env[63538]: DEBUG oslo_concurrency.lockutils [req-ba64c358-dcef-4552-b87b-fd72cbb6cf10 req-7a0d0e8f-70e6-4db9-8844-252a870ce58d service nova] Lock "e79a9eeb-a4c4-4613-bc43-4e40103addf9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.157579] env[63538]: DEBUG nova.compute.manager [req-ba64c358-dcef-4552-b87b-fd72cbb6cf10 req-7a0d0e8f-70e6-4db9-8844-252a870ce58d service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] No waiting events found dispatching network-vif-plugged-2e8868e8-3746-43e8-906c-20e0cd0e7336 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 877.157747] env[63538]: WARNING nova.compute.manager [req-ba64c358-dcef-4552-b87b-fd72cbb6cf10 req-7a0d0e8f-70e6-4db9-8844-252a870ce58d service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Received unexpected event network-vif-plugged-2e8868e8-3746-43e8-906c-20e0cd0e7336 for instance with vm_state building and task_state spawning. 
[ 877.167532] env[63538]: DEBUG nova.network.neutron [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Successfully updated port: 2e8868e8-3746-43e8-906c-20e0cd0e7336 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 877.371268] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 877.371268] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52effb38-d145-77ba-3295-ac5263a99678" [ 877.371268] env[63538]: _type = "HttpNfcLease" [ 877.371268] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 877.371654] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 877.371654] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52effb38-d145-77ba-3295-ac5263a99678" [ 877.371654] env[63538]: _type = "HttpNfcLease" [ 877.371654] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 877.372582] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d261b61-615a-4a2d-b040-85bf99477893 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.380843] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a884a7-700e-b162-40f6-58c6030cbea6/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 877.381029] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a884a7-700e-b162-40f6-58c6030cbea6/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 877.460635] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101104, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.490371] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6e829ef7-7fe6-48a6-bb60-2254d5dea162 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.543488] env[63538]: DEBUG nova.compute.utils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 877.544973] env[63538]: DEBUG nova.compute.manager [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 877.545209] env[63538]: DEBUG nova.network.neutron [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 877.606804] env[63538]: DEBUG nova.policy [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '861014e7810d4cf59cfa061acbb8f7eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4efc4733ea894fb7825e52b29ac8b6ba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 877.647692] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 877.648055] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ce0a7500-ac69-4a83-a457-2f00ff253d20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.656314] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 877.656314] env[63538]: value = "task-5101105" [ 877.656314] env[63538]: _type = "Task" [ 877.656314] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.666542] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101105, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.670324] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "refresh_cache-e79a9eeb-a4c4-4613-bc43-4e40103addf9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.670524] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquired lock "refresh_cache-e79a9eeb-a4c4-4613-bc43-4e40103addf9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.670760] env[63538]: DEBUG nova.network.neutron [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 877.964887] env[63538]: DEBUG oslo_vmware.api [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101104, 'name': PowerOnVM_Task, 'duration_secs': 0.91689} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.965451] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 877.966306] env[63538]: DEBUG nova.compute.manager [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 877.967240] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa85383-b262-4650-869f-4af063ebd540 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.057150] env[63538]: DEBUG nova.compute.manager [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 878.060577] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Applying migration context for instance a2e036ae-318b-44ea-9db0-10fa3838728b as it has an incoming, in-progress migration 0c630b5a-3695-4f8a-95d5-b51ed38cf5ce. Migration status is reverting {{(pid=63538) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 878.062443] env[63538]: INFO nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating resource usage from migration 0c630b5a-3695-4f8a-95d5-b51ed38cf5ce [ 878.088030] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance c8a02fa6-5232-4dde-b6dd-0da1089b6bbf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.088589] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 8fb62f47-cbf2-4b46-bc33-845e832f9ef0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.088589] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance ede967c0-ec3a-4f26-8290-0ee36890cd75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.088589] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 2e1b0bc7-3909-48e2-b9be-26822a57ee67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.089075] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance bd222761-92aa-4f2c-a752-ead9c498ee7a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 878.089725] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 466be7db-79e4-49fd-aa3b-56fbe5c60457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.089725] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance d5d557c6-3d4e-4122-8756-218c9757fa01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.089725] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance f9fa5578-acf3-416f-9cb0-8ceb00e5132d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 878.090804] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 87f8bb3e-6f32-4850-ac54-efad0befb268 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.090804] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance de68a921-bf67-4794-923d-4e062d8ff802 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.090804] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 878.090804] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance d967631f-5c8a-42d8-ac05-4cec3bdb55cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.090804] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 0e718984-cfce-4620-9be6-fdcfb4954da8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 878.090994] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance ade3cce6-5662-4199-96f4-398436f840d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.091108] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 9c1f7da8-59f6-45bc-8d5f-23c8ec760829 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.093807] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Migration 0c630b5a-3695-4f8a-95d5-b51ed38cf5ce is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1715}} [ 878.093807] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance a2e036ae-318b-44ea-9db0-10fa3838728b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.093807] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 376ee3d9-e8b5-4f47-9622-b873126b492e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.093807] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.093807] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance e79a9eeb-a4c4-4613-bc43-4e40103addf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.093807] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance edc670dd-732a-4c54-924c-c99ee539d4d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 878.172733] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101105, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.221783] env[63538]: DEBUG nova.network.neutron [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 878.278390] env[63538]: DEBUG nova.network.neutron [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Successfully created port: 39d5fcb4-d49b-4357-9446-9420f5d3407d {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 878.491135] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.496375] env[63538]: DEBUG nova.network.neutron [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Updating instance_info_cache with network_info: [{"id": "2e8868e8-3746-43e8-906c-20e0cd0e7336", "address": "fa:16:3e:4c:ad:86", "network": {"id": "8497b0d5-d275-4479-ae83-3ef4a1bb795b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-849919494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f6e933bf6c4e71af3b2a1e02d6e42f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e8868e8-37", "ovs_interfaceid": "2e8868e8-3746-43e8-906c-20e0cd0e7336", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.595669] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance b0b4ae9c-95d3-47a1-86a7-120c88b60704 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 878.668942] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101105, 'name': CreateSnapshot_Task, 'duration_secs': 0.713043} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.669307] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 878.670195] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58de456f-81d1-46cc-a015-b011e49a6180 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.999128] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Releasing lock "refresh_cache-e79a9eeb-a4c4-4613-bc43-4e40103addf9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.999623] env[63538]: DEBUG nova.compute.manager [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Instance network_info: |[{"id": "2e8868e8-3746-43e8-906c-20e0cd0e7336", "address": "fa:16:3e:4c:ad:86", "network": {"id": "8497b0d5-d275-4479-ae83-3ef4a1bb795b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-849919494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f6e933bf6c4e71af3b2a1e02d6e42f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e8868e8-37", "ovs_interfaceid": "2e8868e8-3746-43e8-906c-20e0cd0e7336", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 879.000143] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:ad:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ee018eb-75be-4037-a80a-07034d4eae35', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e8868e8-3746-43e8-906c-20e0cd0e7336', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 879.010157] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Creating folder: Project 
(c3f6e933bf6c4e71af3b2a1e02d6e42f). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 879.011258] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-290f244f-4a1e-4d42-9c4e-9df3107cdb0a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.024504] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Created folder: Project (c3f6e933bf6c4e71af3b2a1e02d6e42f) in parent group-v992234. [ 879.025165] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Creating folder: Instances. Parent ref: group-v992416. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 879.025364] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc135676-2fba-4b25-9322-be3c4b0c0681 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.037364] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Created folder: Instances in parent group-v992416. [ 879.037743] env[63538]: DEBUG oslo.service.loopingcall [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 879.038035] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 879.038337] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32e67765-9fac-4bbf-89b9-a8b9b25e0a6e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.062199] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 879.062199] env[63538]: value = "task-5101108" [ 879.062199] env[63538]: _type = "Task" [ 879.062199] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.073376] env[63538]: DEBUG nova.compute.manager [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 879.075598] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101108, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.098457] env[63538]: DEBUG nova.virt.hardware [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 879.098683] env[63538]: DEBUG nova.virt.hardware [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 879.098826] env[63538]: DEBUG nova.virt.hardware [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 879.099018] env[63538]: DEBUG nova.virt.hardware [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 879.099299] env[63538]: DEBUG nova.virt.hardware [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 879.099399] env[63538]: DEBUG nova.virt.hardware [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 879.099742] env[63538]: DEBUG nova.virt.hardware [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 879.099959] env[63538]: DEBUG nova.virt.hardware [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 879.100175] env[63538]: DEBUG nova.virt.hardware [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 879.100175] env[63538]: DEBUG nova.virt.hardware [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 879.100542] env[63538]: DEBUG nova.virt.hardware [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 879.101627] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17911e49-f848-4ea0-9898-1816328aa945 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.106025] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 1db1d558-2473-49cb-b309-f7192bd6b9c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 879.115372] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c410f72f-f2cb-4259-9618-46a222f96300 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.190631] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 879.191026] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a8e7a833-24fb-4e50-90d4-910c94e561af {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.203152] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 879.203152] env[63538]: value = "task-5101109" [ 879.203152] env[63538]: _type = "Task" [ 879.203152] env[63538]: } to complete. 
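The "Waiting for the task ... progress is 0% ... completed successfully" pairs that recur throughout this log come from oslo.vmware polling vCenter tasks (CloneVM_Task, SearchDatastore_Task, PowerOffVM_Task, and so on). A stand-alone sketch of that poll-until-done pattern, assuming a hypothetical poll_progress callable rather than the real oslo.vmware session API:

    import time

    def wait_for_task(poll_progress, interval=0.5, timeout=300.0):
        """Poll until the task reports success; poll_progress() returns (state, percent)."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, percent = poll_progress()
            print("progress is %d%%" % percent)
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)
        raise TimeoutError("task did not complete before the timeout")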
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.214774] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101109, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.265073] env[63538]: DEBUG nova.compute.manager [req-3a72336e-208c-4f1b-b10b-39a497af6280 req-8b352381-efae-4e35-be39-6d1dc3a48b73 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Received event network-changed-2e8868e8-3746-43e8-906c-20e0cd0e7336 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 879.265377] env[63538]: DEBUG nova.compute.manager [req-3a72336e-208c-4f1b-b10b-39a497af6280 req-8b352381-efae-4e35-be39-6d1dc3a48b73 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Refreshing instance network info cache due to event network-changed-2e8868e8-3746-43e8-906c-20e0cd0e7336. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 879.265739] env[63538]: DEBUG oslo_concurrency.lockutils [req-3a72336e-208c-4f1b-b10b-39a497af6280 req-8b352381-efae-4e35-be39-6d1dc3a48b73 service nova] Acquiring lock "refresh_cache-e79a9eeb-a4c4-4613-bc43-4e40103addf9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.265954] env[63538]: DEBUG oslo_concurrency.lockutils [req-3a72336e-208c-4f1b-b10b-39a497af6280 req-8b352381-efae-4e35-be39-6d1dc3a48b73 service nova] Acquired lock "refresh_cache-e79a9eeb-a4c4-4613-bc43-4e40103addf9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.266182] env[63538]: DEBUG nova.network.neutron [req-3a72336e-208c-4f1b-b10b-39a497af6280 req-8b352381-efae-4e35-be39-6d1dc3a48b73 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Refreshing network info cache for port 2e8868e8-3746-43e8-906c-20e0cd0e7336 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 879.573480] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101108, 'name': CreateVM_Task, 'duration_secs': 0.402788} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.573848] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 879.574560] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.574775] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.575136] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 879.575416] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1b17fd3-37f3-4236-8027-899f900af517 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.584171] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 879.584171] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5233c3f5-bc0e-8c25-c656-07b346c3784a" [ 879.584171] env[63538]: _type = "Task" [ 879.584171] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.595532] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5233c3f5-bc0e-8c25-c656-07b346c3784a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.609620] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance fa8ed101-914d-4751-ab9b-f68ad5da7a56 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 879.646971] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "9c1f7da8-59f6-45bc-8d5f-23c8ec760829" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.647323] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "9c1f7da8-59f6-45bc-8d5f-23c8ec760829" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.647505] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "9c1f7da8-59f6-45bc-8d5f-23c8ec760829-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.647702] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "9c1f7da8-59f6-45bc-8d5f-23c8ec760829-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.647858] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "9c1f7da8-59f6-45bc-8d5f-23c8ec760829-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.650440] env[63538]: INFO nova.compute.manager [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Terminating instance [ 879.652882] env[63538]: DEBUG nova.compute.manager [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 879.653130] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 879.654034] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572659bb-0280-4e01-a184-e9ae621e8f99 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.664038] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 879.664379] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72f26546-7e1e-44c7-bd17-2917586588bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.671489] env[63538]: DEBUG oslo_vmware.api [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 879.671489] env[63538]: value = "task-5101110" [ 879.671489] env[63538]: _type = "Task" [ 879.671489] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.681249] env[63538]: DEBUG oslo_vmware.api [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101110, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.717254] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101109, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.099018] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5233c3f5-bc0e-8c25-c656-07b346c3784a, 'name': SearchDatastore_Task, 'duration_secs': 0.0171} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.099457] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.099754] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 880.100046] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.100222] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.100435] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 880.100755] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6725156-815e-4bce-8c73-7c34465c6a2c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.112622] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 0df15328-aebd-44c5-9c78-ee05f188ad95 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 880.116130] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 880.116130] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Folder [datastore2] devstack-image-cache_base created. 
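The SearchDatastore_Task / MakeDirectory / "Folder [datastore2] devstack-image-cache_base created" sequence above is a create-if-missing on the shared image cache, serialized by the "[datastore2] devstack-image-cache_base" lock so concurrent instance builds do not race. A small sketch of that lock-then-create pattern using oslo.concurrency's lockutils.lock context manager (the same library the lock lines come from), with a local directory standing in for the datastore folder; the path and lock name here are illustrative:

    import os
    from oslo_concurrency import lockutils

    def ensure_image_cache_dir(path="/tmp/devstack-image-cache_base"):
        # Default lockutils.lock() is an in-process lock; the log also shows an
        # external semaphore layered on top, which is omitted in this sketch.
        with lockutils.lock("devstack-image-cache_base"):
            os.makedirs(path, exist_ok=True)  # no-op when another worker already created it
        return path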
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 880.116723] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bd96dc5-a145-4717-a773-8201faa72cad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.124312] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 880.124312] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526f4ac3-13c5-0bff-146f-a24cb04e5d2f" [ 880.124312] env[63538]: _type = "Task" [ 880.124312] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.136130] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526f4ac3-13c5-0bff-146f-a24cb04e5d2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.184616] env[63538]: DEBUG oslo_vmware.api [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101110, 'name': PowerOffVM_Task, 'duration_secs': 0.471363} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.184899] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 880.185092] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 880.185370] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea3e45f4-f9d2-467a-a267-330d9f30f9d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.216890] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101109, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.253986] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 880.254353] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 880.254644] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleting the datastore file [datastore1] 9c1f7da8-59f6-45bc-8d5f-23c8ec760829 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 880.254977] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d069ec6-1c6c-4ecf-b3b3-6d1ae2f3601d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.264555] env[63538]: DEBUG oslo_vmware.api [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 880.264555] env[63538]: value = "task-5101112" [ 880.264555] env[63538]: _type = "Task" [ 880.264555] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.276248] env[63538]: DEBUG oslo_vmware.api [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101112, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.276738] env[63538]: DEBUG nova.network.neutron [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Successfully updated port: 39d5fcb4-d49b-4357-9446-9420f5d3407d {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 880.375039] env[63538]: DEBUG nova.network.neutron [req-3a72336e-208c-4f1b-b10b-39a497af6280 req-8b352381-efae-4e35-be39-6d1dc3a48b73 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Updated VIF entry in instance network info cache for port 2e8868e8-3746-43e8-906c-20e0cd0e7336. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 880.375479] env[63538]: DEBUG nova.network.neutron [req-3a72336e-208c-4f1b-b10b-39a497af6280 req-8b352381-efae-4e35-be39-6d1dc3a48b73 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Updating instance_info_cache with network_info: [{"id": "2e8868e8-3746-43e8-906c-20e0cd0e7336", "address": "fa:16:3e:4c:ad:86", "network": {"id": "8497b0d5-d275-4479-ae83-3ef4a1bb795b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-849919494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f6e933bf6c4e71af3b2a1e02d6e42f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e8868e8-37", "ovs_interfaceid": "2e8868e8-3746-43e8-906c-20e0cd0e7336", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.408443] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "4ec5d3a2-8b29-4074-b323-f94704043b8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.408443] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "4ec5d3a2-8b29-4074-b323-f94704043b8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.616074] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance edcc5700-7b1e-494a-82d1-844373a9d5a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 880.636661] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526f4ac3-13c5-0bff-146f-a24cb04e5d2f, 'name': SearchDatastore_Task, 'duration_secs': 0.014193} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.637569] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51b51646-9ad1-4fc5-bc90-a6fedd346ef7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.646033] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 880.646033] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523952b8-0053-945f-e399-fb61396a0abc" [ 880.646033] env[63538]: _type = "Task" [ 880.646033] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.657416] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523952b8-0053-945f-e399-fb61396a0abc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.715603] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101109, 'name': CloneVM_Task, 'duration_secs': 1.400527} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.716016] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Created linked-clone VM from snapshot [ 880.717139] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c27585-402d-453f-9473-b9eccd9c2f50 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.727714] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Uploading image 5d54543a-0c8e-4827-99bc-4a46030f7731 {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 880.755258] env[63538]: DEBUG oslo_vmware.rw_handles [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 880.755258] env[63538]: value = "vm-992419" [ 880.755258] env[63538]: _type = "VirtualMachine" [ 880.755258] env[63538]: }. 
{{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 880.755633] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-38e5d8f6-71f4-46dc-a6ff-4bd31b4d678d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.765750] env[63538]: DEBUG oslo_vmware.rw_handles [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lease: (returnval){ [ 880.765750] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527d4bfa-5b10-53a8-14ca-136b19a832d9" [ 880.765750] env[63538]: _type = "HttpNfcLease" [ 880.765750] env[63538]: } obtained for exporting VM: (result){ [ 880.765750] env[63538]: value = "vm-992419" [ 880.765750] env[63538]: _type = "VirtualMachine" [ 880.765750] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 880.766115] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the lease: (returnval){ [ 880.766115] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527d4bfa-5b10-53a8-14ca-136b19a832d9" [ 880.766115] env[63538]: _type = "HttpNfcLease" [ 880.766115] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 880.778909] env[63538]: DEBUG oslo_vmware.api [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101112, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.404903} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.781307] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.781726] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 880.782043] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 880.782336] env[63538]: INFO nova.compute.manager [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Took 1.13 seconds to destroy the instance on the hypervisor. 
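The terminate path for instance 9c1f7da8-59f6-45bc-8d5f-23c8ec760829 above runs in a fixed order: PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task on the instance folder, then network deallocation. The toy sketch below mirrors only that ordering; the classes and helpers are hypothetical stand-ins, not the Nova vmops/vm_util API:

    class FakeVM:
        """Toy stand-in for a vSphere VM handle."""
        def __init__(self, name):
            self.name, self.powered_on, self.registered = name, True, True
        def power_off(self):      # analogous to PowerOffVM_Task
            self.powered_on = False
        def unregister(self):     # analogous to UnregisterVM
            self.registered = False

    def destroy_instance(vm, datastore_files, network_ports):
        """Tear down in the same order the log records."""
        vm.power_off()
        vm.unregister()
        datastore_files.clear()   # stands in for DeleteDatastoreFile_Task on [datastore1] <uuid>
        network_ports.clear()     # stands in for deallocate_for_instance()

    destroy_instance(FakeVM("9c1f7da8-59f6-45bc-8d5f-23c8ec760829"),
                     datastore_files=["9c1f7da8-59f6-45bc-8d5f-23c8ec760829.vmdk"],
                     network_ports=["2906e927-8bc7-4651-b391-e6a376f3208b"])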
[ 880.782672] env[63538]: DEBUG oslo.service.loopingcall [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.782930] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 880.782930] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527d4bfa-5b10-53a8-14ca-136b19a832d9" [ 880.782930] env[63538]: _type = "HttpNfcLease" [ 880.782930] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 880.783543] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "refresh_cache-edc670dd-732a-4c54-924c-c99ee539d4d9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.783601] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquired lock "refresh_cache-edc670dd-732a-4c54-924c-c99ee539d4d9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.783764] env[63538]: DEBUG nova.network.neutron [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 880.785113] env[63538]: DEBUG nova.compute.manager [-] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 880.785334] env[63538]: DEBUG nova.network.neutron [-] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 880.878457] env[63538]: DEBUG oslo_concurrency.lockutils [req-3a72336e-208c-4f1b-b10b-39a497af6280 req-8b352381-efae-4e35-be39-6d1dc3a48b73 service nova] Releasing lock "refresh_cache-e79a9eeb-a4c4-4613-bc43-4e40103addf9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.119420] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance f1838794-710c-4bea-9e73-f6912e1b69f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 881.139710] env[63538]: DEBUG nova.compute.manager [req-9f4b79ea-6f71-4d19-be1e-484f1bdea7f9 req-8bbab609-e6c9-452d-b6e2-c9b4bcd48e8a service nova] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Received event network-vif-deleted-2906e927-8bc7-4651-b391-e6a376f3208b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 881.139973] env[63538]: INFO nova.compute.manager [req-9f4b79ea-6f71-4d19-be1e-484f1bdea7f9 req-8bbab609-e6c9-452d-b6e2-c9b4bcd48e8a service nova] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Neutron deleted interface 2906e927-8bc7-4651-b391-e6a376f3208b; detaching it from the instance and deleting it from the info cache [ 881.140210] env[63538]: DEBUG nova.network.neutron [req-9f4b79ea-6f71-4d19-be1e-484f1bdea7f9 req-8bbab609-e6c9-452d-b6e2-c9b4bcd48e8a service nova] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.157760] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523952b8-0053-945f-e399-fb61396a0abc, 'name': SearchDatastore_Task, 'duration_secs': 0.016839} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.158102] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.158423] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] e79a9eeb-a4c4-4613-bc43-4e40103addf9/e79a9eeb-a4c4-4613-bc43-4e40103addf9.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 881.159027] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ce5fef1-fbb0-4517-a80a-d63402748185 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.167444] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 881.167444] env[63538]: value = "task-5101114" [ 881.167444] env[63538]: _type = "Task" [ 881.167444] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.176673] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101114, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.278617] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 881.278617] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527d4bfa-5b10-53a8-14ca-136b19a832d9" [ 881.278617] env[63538]: _type = "HttpNfcLease" [ 881.278617] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 881.278947] env[63538]: DEBUG oslo_vmware.rw_handles [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 881.278947] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527d4bfa-5b10-53a8-14ca-136b19a832d9" [ 881.278947] env[63538]: _type = "HttpNfcLease" [ 881.278947] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 881.279803] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6989e5dd-bb8d-4d01-9780-bf4bc09e29e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.291552] env[63538]: DEBUG oslo_vmware.rw_handles [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d7356-f11b-e01c-bd93-cb77d9a60c8b/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 881.291826] env[63538]: DEBUG oslo_vmware.rw_handles [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d7356-f11b-e01c-bd93-cb77d9a60c8b/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 881.348498] env[63538]: DEBUG nova.network.neutron [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 881.352893] env[63538]: DEBUG nova.compute.manager [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 service nova] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Received event network-vif-plugged-39d5fcb4-d49b-4357-9446-9420f5d3407d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 881.353126] env[63538]: DEBUG oslo_concurrency.lockutils [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 service nova] Acquiring lock "edc670dd-732a-4c54-924c-c99ee539d4d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.353332] env[63538]: DEBUG oslo_concurrency.lockutils [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 service nova] Lock "edc670dd-732a-4c54-924c-c99ee539d4d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.353502] env[63538]: DEBUG oslo_concurrency.lockutils [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 service nova] Lock "edc670dd-732a-4c54-924c-c99ee539d4d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.353730] env[63538]: DEBUG nova.compute.manager [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 service nova] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] No waiting events found dispatching network-vif-plugged-39d5fcb4-d49b-4357-9446-9420f5d3407d {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 881.353849] env[63538]: WARNING nova.compute.manager [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 service nova] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Received unexpected event network-vif-plugged-39d5fcb4-d49b-4357-9446-9420f5d3407d for instance with vm_state building and task_state spawning. [ 881.354023] env[63538]: DEBUG nova.compute.manager [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 service nova] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Received event network-changed-39d5fcb4-d49b-4357-9446-9420f5d3407d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 881.354188] env[63538]: DEBUG nova.compute.manager [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 service nova] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Refreshing instance network info cache due to event network-changed-39d5fcb4-d49b-4357-9446-9420f5d3407d. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 881.354358] env[63538]: DEBUG oslo_concurrency.lockutils [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 service nova] Acquiring lock "refresh_cache-edc670dd-732a-4c54-924c-c99ee539d4d9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.392523] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6146230a-4d25-466b-b070-4c1f04bf9fde {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.567511] env[63538]: DEBUG nova.network.neutron [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Updating instance_info_cache with network_info: [{"id": "39d5fcb4-d49b-4357-9446-9420f5d3407d", "address": "fa:16:3e:7a:43:ac", "network": {"id": "19a71225-10fa-49f2-ac67-af1873417755", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1401406561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4efc4733ea894fb7825e52b29ac8b6ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39d5fcb4-d4", "ovs_interfaceid": "39d5fcb4-d49b-4357-9446-9420f5d3407d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.586845] env[63538]: DEBUG nova.network.neutron [-] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.623337] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance e447c109-4cef-4cc7-9acf-61abc0f47482 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
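The repeated "has been scheduled to this compute host ... Skipping heal of allocation" records are the resource tracker declining to reclaim placement allocations for instances that already hold a claim on this node but have not started yet. A compact sketch of that decision, using hypothetical data structures in place of the real ResourceTracker state:

    def allocations_to_remove(tracked, allocations):
        """tracked maps instance uuid -> state ('scheduled', 'active', ...);
        allocations maps instance uuid -> resources dict from placement."""
        stale = {}
        for uuid, resources in allocations.items():
            state = tracked.get(uuid)
            if state == "scheduled":
                continue                 # scheduled but not started: keep it ("Skipping heal")
            if state is None:
                stale[uuid] = resources  # unknown on this host: candidate for removal
        return stale

    print(allocations_to_remove(
        {"1db1d558-2473-49cb-b309-f7192bd6b9c1": "scheduled"},
        {"1db1d558-2473-49cb-b309-f7192bd6b9c1": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}}))
    # {} -> nothing reclaimed, matching the "Skipping heal of allocation" lines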
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 881.642987] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6da061db-44e3-4beb-bb2f-8b93ca133d78 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.654727] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aae33ad-9357-476e-91df-46ef2f948a9e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.677900] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101114, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.698219] env[63538]: DEBUG nova.compute.manager [req-9f4b79ea-6f71-4d19-be1e-484f1bdea7f9 req-8bbab609-e6c9-452d-b6e2-c9b4bcd48e8a service nova] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Detach interface failed, port_id=2906e927-8bc7-4651-b391-e6a376f3208b, reason: Instance 9c1f7da8-59f6-45bc-8d5f-23c8ec760829 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 882.072057] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Releasing lock "refresh_cache-edc670dd-732a-4c54-924c-c99ee539d4d9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.072398] env[63538]: DEBUG nova.compute.manager [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Instance network_info: |[{"id": "39d5fcb4-d49b-4357-9446-9420f5d3407d", "address": "fa:16:3e:7a:43:ac", "network": {"id": "19a71225-10fa-49f2-ac67-af1873417755", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1401406561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4efc4733ea894fb7825e52b29ac8b6ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39d5fcb4-d4", "ovs_interfaceid": "39d5fcb4-d49b-4357-9446-9420f5d3407d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 882.073012] env[63538]: DEBUG oslo_concurrency.lockutils [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 
service nova] Acquired lock "refresh_cache-edc670dd-732a-4c54-924c-c99ee539d4d9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.073335] env[63538]: DEBUG nova.network.neutron [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 service nova] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Refreshing network info cache for port 39d5fcb4-d49b-4357-9446-9420f5d3407d {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 882.074838] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:43:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d39252e-42ef-4252-98d3-62af5a0d109d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39d5fcb4-d49b-4357-9446-9420f5d3407d', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 882.083158] env[63538]: DEBUG oslo.service.loopingcall [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 882.083795] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 882.084070] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca6aeb14-c324-4d63-bee3-5781d93e81c2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.101538] env[63538]: INFO nova.compute.manager [-] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Took 1.32 seconds to deallocate network for instance. [ 882.110475] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.110475] env[63538]: value = "task-5101115" [ 882.110475] env[63538]: _type = "Task" [ 882.110475] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.120597] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101115, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.127963] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance b47925eb-3d97-415b-9410-2e325da5ce79 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 882.179061] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101114, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.631897] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.631897] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101115, 'name': CreateVM_Task, 'duration_secs': 0.489503} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.631897] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 882.631897] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.631897] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.631897] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 882.631897] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d351373-9e7c-4e6a-b744-05055e65b35f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.631897] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance f5d92749-04d6-4935-8dc6-afb692222df0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 882.631897] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 882.631897] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3904MB phys_disk=100GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '16', 'num_vm_active': '11', 'num_task_None': '11', 'num_os_type_None': '16', 'num_proj_7c1f0c999ede418c866074d9276050ff': '2', 'io_workload': '3', 'num_proj_3dc18da1ea704eeaaeb62633c4f76ee8': '2', 'num_proj_ea05f3fb4676466bb2a286f5a2fefb8f': '1', 'num_proj_0d6954a5254f441ca256c85330297cef': '1', 'num_vm_resized': '1', 'num_task_resize_reverting': '1', 'num_proj_9427981aac124f6aa0c4d8d45b0ae917': '1', 'num_proj_55edcd65da7b4a569a4c27aab4819cde': '2', 'num_vm_rescued': '1', 'num_proj_7063c42297c24f2baf7271fa25dec927': '1', 'num_vm_suspended': '1', 'num_task_image_uploading': '1', 'num_proj_422f50dc66ec48b7b262643390072f3d': '1', 'num_task_rebuild_spawning': '1', 'num_proj_2a701618902d411b8af203fdbb1069be': '1', 'num_task_image_snapshot': '1', 'num_proj_3cd518094a084fc0be66f9a90ac2ad11': '2', 'num_vm_building': '2', 'num_task_spawning': '1', 'num_proj_c3f6e933bf6c4e71af3b2a1e02d6e42f': '1', 'num_proj_4efc4733ea894fb7825e52b29ac8b6ba': '1'} {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 882.635758] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 882.635758] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ac9c7c-1103-51ba-f407-16a458d863f9" [ 882.635758] env[63538]: _type = "Task" [ 882.635758] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.646347] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ac9c7c-1103-51ba-f407-16a458d863f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.684389] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101114, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.849757] env[63538]: DEBUG nova.network.neutron [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 service nova] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Updated VIF entry in instance network info cache for port 39d5fcb4-d49b-4357-9446-9420f5d3407d. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 882.850523] env[63538]: DEBUG nova.network.neutron [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 service nova] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Updating instance_info_cache with network_info: [{"id": "39d5fcb4-d49b-4357-9446-9420f5d3407d", "address": "fa:16:3e:7a:43:ac", "network": {"id": "19a71225-10fa-49f2-ac67-af1873417755", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1401406561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4efc4733ea894fb7825e52b29ac8b6ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39d5fcb4-d4", "ovs_interfaceid": "39d5fcb4-d49b-4357-9446-9420f5d3407d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.096564] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4518c558-8514-4acb-b046-89c9e0bce857 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.105715] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8604105-f9e5-49e8-9561-701449bfad90 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.142314] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0681e1f-12a8-414b-b61d-2150903646ac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.151288] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ac9c7c-1103-51ba-f407-16a458d863f9, 'name': SearchDatastore_Task, 'duration_secs': 0.015196} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.153838] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.154137] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 883.154246] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.154373] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.154560] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 883.154968] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cc85700-d06f-41c1-ad09-5148f4afa26a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.157948] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ef11af-b05c-4582-a24f-60302de5cbe8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.172848] env[63538]: DEBUG nova.compute.provider_tree [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 883.178672] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 883.178672] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 
tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 883.179753] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94e2531f-0188-44ee-98f5-450b91891003 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.186398] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 883.186398] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d012ff-adf5-9d3c-201f-ed1ea775ac41" [ 883.186398] env[63538]: _type = "Task" [ 883.186398] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.190697] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101114, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.877087} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.194137] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] e79a9eeb-a4c4-4613-bc43-4e40103addf9/e79a9eeb-a4c4-4613-bc43-4e40103addf9.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 883.194375] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 883.195018] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4bb06648-cd8e-4f14-a86e-ed4b668d1ff5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.206843] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d012ff-adf5-9d3c-201f-ed1ea775ac41, 'name': SearchDatastore_Task, 'duration_secs': 0.016785} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.209076] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 883.209076] env[63538]: value = "task-5101116" [ 883.209076] env[63538]: _type = "Task" [ 883.209076] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.209350] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd237685-565b-4077-ac31-c662023c74ae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.219176] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 883.219176] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e2c671-2465-0efa-5b0e-d4caf6562ef7" [ 883.219176] env[63538]: _type = "Task" [ 883.219176] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.222758] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101116, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.235419] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e2c671-2465-0efa-5b0e-d4caf6562ef7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.357098] env[63538]: DEBUG oslo_concurrency.lockutils [req-30b516a7-1a97-47f7-94f4-4fa4cc94b9e6 req-949657d2-051e-4fb2-9b26-4f7e4b0ce634 service nova] Releasing lock "refresh_cache-edc670dd-732a-4c54-924c-c99ee539d4d9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.681743] env[63538]: DEBUG nova.scheduler.client.report [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 883.721563] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101116, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.122718} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.721932] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 883.722760] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de134f8-021f-41f0-b770-5a1c551c33d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.735160] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e2c671-2465-0efa-5b0e-d4caf6562ef7, 'name': SearchDatastore_Task, 'duration_secs': 0.021222} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.745260] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.745613] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] edc670dd-732a-4c54-924c-c99ee539d4d9/edc670dd-732a-4c54-924c-c99ee539d4d9.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 883.754806] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] e79a9eeb-a4c4-4613-bc43-4e40103addf9/e79a9eeb-a4c4-4613-bc43-4e40103addf9.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 883.755204] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58f133a6-a374-43e0-8730-e4c0ee94d975 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.757707] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a50645e-1c87-4c39-8f33-0fe0b7690271 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.778717] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 
883.778717] env[63538]: value = "task-5101117" [ 883.778717] env[63538]: _type = "Task" [ 883.778717] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.780354] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 883.780354] env[63538]: value = "task-5101118" [ 883.780354] env[63538]: _type = "Task" [ 883.780354] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.792792] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101117, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.796412] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101118, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.187863] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63538) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 884.188386] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.149s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.188828] env[63538]: DEBUG oslo_concurrency.lockutils [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.152s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.189142] env[63538]: DEBUG oslo_concurrency.lockutils [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.191721] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.131s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.194231] env[63538]: INFO nova.compute.claims [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 
tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 884.197964] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 884.197964] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Cleaning up deleted instances {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 884.220657] env[63538]: INFO nova.scheduler.client.report [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleted allocations for instance bd222761-92aa-4f2c-a752-ead9c498ee7a [ 884.295853] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101117, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.299778] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101118, 'name': ReconfigVM_Task, 'duration_secs': 0.39561} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.302878] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Reconfigured VM instance instance-00000042 to attach disk [datastore2] e79a9eeb-a4c4-4613-bc43-4e40103addf9/e79a9eeb-a4c4-4613-bc43-4e40103addf9.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 884.303599] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e75775e-4e9d-43cd-92f4-917a4a61ccad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.313860] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 884.313860] env[63538]: value = "task-5101119" [ 884.313860] env[63538]: _type = "Task" [ 884.313860] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.326601] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101119, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.713481] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] There are 46 instances to clean {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11326}} [ 884.713884] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 79f4cdd9-219a-4440-9dd2-9b2a360965b1] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 884.729822] env[63538]: DEBUG oslo_concurrency.lockutils [None req-faf0d951-7a19-4653-8711-55eae7cc5b47 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "bd222761-92aa-4f2c-a752-ead9c498ee7a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.333s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.790862] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101117, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.648365} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.791552] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] edc670dd-732a-4c54-924c-c99ee539d4d9/edc670dd-732a-4c54-924c-c99ee539d4d9.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 884.791815] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 884.792117] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2ca95902-6642-4e64-af77-abe36e35711b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.800698] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 884.800698] env[63538]: value = "task-5101120" [ 884.800698] env[63538]: _type = "Task" [ 884.800698] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.811734] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101120, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.824974] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101119, 'name': Rename_Task, 'duration_secs': 0.173189} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.825250] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 884.825519] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3adf8ec-a7ef-4e3e-b74b-d170c258f546 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.832503] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 884.832503] env[63538]: value = "task-5101121" [ 884.832503] env[63538]: _type = "Task" [ 884.832503] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.841971] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101121, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.220046] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 5bf7ed57-62d5-4abc-96d8-78b979baed92] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 885.319794] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101120, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073711} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.320196] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 885.321153] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122f3939-5cf6-4785-bd0a-52d656fe8332 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.348721] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] edc670dd-732a-4c54-924c-c99ee539d4d9/edc670dd-732a-4c54-924c-c99ee539d4d9.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.354812] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecb1e232-f76f-495c-b2c1-3ad7ad99f00d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.375453] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 885.375453] env[63538]: value = "task-5101122" [ 885.375453] env[63538]: _type = "Task" [ 885.375453] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.378653] env[63538]: DEBUG oslo_vmware.api [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101121, 'name': PowerOnVM_Task, 'duration_secs': 0.493644} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.382450] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 885.382690] env[63538]: INFO nova.compute.manager [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Took 8.93 seconds to spawn the instance on the hypervisor. 
[ 885.382874] env[63538]: DEBUG nova.compute.manager [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 885.386180] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a459ff94-391f-4fd8-8796-842f83b692e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.694952] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4be3cfa-2aab-483d-8d1c-fbc9b7596b16 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.703539] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0360da-35c8-4356-b605-f271a81fa4df {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.736632] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 6850191a-4190-4795-ae18-830b41a76085] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 885.739415] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff81c296-306f-4785-9479-b59eea78ae5d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.749099] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1c60b7-0590-49cd-9a28-0bbed4ea4bd7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.765758] env[63538]: DEBUG nova.compute.provider_tree [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.889236] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101122, 'name': ReconfigVM_Task, 'duration_secs': 0.415697} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.889695] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Reconfigured VM instance instance-00000043 to attach disk [datastore2] edc670dd-732a-4c54-924c-c99ee539d4d9/edc670dd-732a-4c54-924c-c99ee539d4d9.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.890471] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-024efe48-7e7d-4ad8-bc82-ac21e1941e05 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.898406] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 885.898406] env[63538]: value = "task-5101123" [ 885.898406] env[63538]: _type = "Task" [ 885.898406] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.917296] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101123, 'name': Rename_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.917997] env[63538]: INFO nova.compute.manager [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Took 62.88 seconds to build instance. [ 886.243318] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: f703cd1c-4b77-4a85-a91b-63a2bd0e84a9] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 886.269797] env[63538]: DEBUG nova.scheduler.client.report [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 886.408733] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101123, 'name': Rename_Task, 'duration_secs': 0.190379} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.409095] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 886.409353] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5a9f815-e814-462d-8c6d-936cb0d527be {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.417288] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 886.417288] env[63538]: value = "task-5101124" [ 886.417288] env[63538]: _type = "Task" [ 886.417288] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.421129] env[63538]: DEBUG oslo_concurrency.lockutils [None req-657c6544-d705-437b-a72a-545f83456ef1 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "e79a9eeb-a4c4-4613-bc43-4e40103addf9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.388s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.428162] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101124, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.746934] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e4b94aa7-7434-4a6e-b6d3-ed02315c435f] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 886.775148] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.583s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.776131] env[63538]: DEBUG nova.compute.manager [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 886.780476] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.857s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.780744] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.783538] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.748s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.785096] env[63538]: INFO nova.compute.claims [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 886.821739] env[63538]: INFO nova.scheduler.client.report [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleted allocations for instance 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1 [ 886.847665] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a884a7-700e-b162-40f6-58c6030cbea6/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 886.848945] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f92a6d-c3d8-4095-8749-570f85df6042 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.856480] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a884a7-700e-b162-40f6-58c6030cbea6/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 886.856684] env[63538]: ERROR oslo_vmware.rw_handles [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a884a7-700e-b162-40f6-58c6030cbea6/disk-0.vmdk due to incomplete transfer. 
[ 886.856972] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2231dc82-fcb7-475c-9521-ae1766a6ac38 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.867483] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a884a7-700e-b162-40f6-58c6030cbea6/disk-0.vmdk. {{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 886.867709] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Uploaded image b37dc44b-ac20-4e6d-abee-252421a4544c to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 886.869876] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 886.870176] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-72b5e00e-7523-4fc2-9acc-1cd9e566a82a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.879291] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 886.879291] env[63538]: value = "task-5101125" [ 886.879291] env[63538]: _type = "Task" [ 886.879291] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.889172] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101125, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.923300] env[63538]: DEBUG nova.compute.manager [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 886.933537] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101124, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.250748] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 46e2c1f4-edf7-45d6-ba77-c872005fcf1b] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 887.289992] env[63538]: DEBUG nova.compute.utils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 887.296147] env[63538]: DEBUG nova.compute.manager [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 887.296147] env[63538]: DEBUG nova.network.neutron [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 887.330395] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5d7ea812-dd30-453e-912e-28ff212b29c1 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.243s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.349292] env[63538]: DEBUG nova.policy [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df636e58e21b425ea17724bc8831cb8a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fae9c04cde44afbb9a8295910faf2dc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 887.393019] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101125, 'name': Destroy_Task, 'duration_secs': 0.413671} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.393019] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Destroyed the VM [ 887.393019] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 887.393019] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a50340aa-0af0-4aee-bbde-6e60b83d15a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.399103] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 887.399103] env[63538]: value = "task-5101126" [ 887.399103] env[63538]: _type = "Task" [ 887.399103] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.409808] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101126, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.427708] env[63538]: DEBUG oslo_vmware.api [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101124, 'name': PowerOnVM_Task, 'duration_secs': 0.687551} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.427997] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 887.428264] env[63538]: INFO nova.compute.manager [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Took 8.35 seconds to spawn the instance on the hypervisor. 
[ 887.428476] env[63538]: DEBUG nova.compute.manager [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 887.429302] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffeccc07-9098-45fe-b7f6-a65e1bef867a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.463111] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.712240] env[63538]: DEBUG nova.network.neutron [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Successfully created port: 2dc178e5-2d66-4747-ae40-c03f69eba8e8 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.754947] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 707a79e2-f5db-479c-b719-1e040935cda3] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 887.795236] env[63538]: DEBUG nova.compute.manager [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 887.922872] env[63538]: DEBUG oslo_vmware.api [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101126, 'name': RemoveSnapshot_Task, 'duration_secs': 0.51222} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.923347] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 887.924115] env[63538]: INFO nova.compute.manager [None req-9cdb61b4-7717-4df1-9b95-3e230839d047 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Took 15.76 seconds to snapshot the instance on the hypervisor. [ 887.961917] env[63538]: INFO nova.compute.manager [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Took 64.12 seconds to build instance. 
[ 888.063930] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "8fb62f47-cbf2-4b46-bc33-845e832f9ef0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.064229] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "8fb62f47-cbf2-4b46-bc33-845e832f9ef0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.064444] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "8fb62f47-cbf2-4b46-bc33-845e832f9ef0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.064694] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "8fb62f47-cbf2-4b46-bc33-845e832f9ef0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.064909] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "8fb62f47-cbf2-4b46-bc33-845e832f9ef0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.067225] env[63538]: INFO nova.compute.manager [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Terminating instance [ 888.069315] env[63538]: DEBUG nova.compute.manager [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 888.069536] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 888.070427] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1cf8644-fbeb-4182-8c68-6c5541981cae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.084953] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 888.085306] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7b6f4e2-84d0-4953-9eeb-c69e403d6ab9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.095580] env[63538]: DEBUG oslo_vmware.api [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 888.095580] env[63538]: value = "task-5101127" [ 888.095580] env[63538]: _type = "Task" [ 888.095580] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.108389] env[63538]: DEBUG oslo_vmware.api [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101127, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.260201] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 5a1202a4-d7ec-4824-ac9b-3d9dd159cb6b] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 888.369675] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763a38a1-060d-4e29-914a-8ac917f87278 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.382022] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa7ee96-2715-4b14-a878-6a200d659483 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.417571] env[63538]: DEBUG nova.compute.manager [req-8ec77283-1d56-440b-b20e-0382c0afbbce req-c28506d2-a232-4704-9b51-4f4094dd46a3 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Received event network-changed-2e8868e8-3746-43e8-906c-20e0cd0e7336 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 888.418066] env[63538]: DEBUG nova.compute.manager [req-8ec77283-1d56-440b-b20e-0382c0afbbce req-c28506d2-a232-4704-9b51-4f4094dd46a3 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Refreshing instance network info cache due to event network-changed-2e8868e8-3746-43e8-906c-20e0cd0e7336. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 888.418349] env[63538]: DEBUG oslo_concurrency.lockutils [req-8ec77283-1d56-440b-b20e-0382c0afbbce req-c28506d2-a232-4704-9b51-4f4094dd46a3 service nova] Acquiring lock "refresh_cache-e79a9eeb-a4c4-4613-bc43-4e40103addf9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.418473] env[63538]: DEBUG oslo_concurrency.lockutils [req-8ec77283-1d56-440b-b20e-0382c0afbbce req-c28506d2-a232-4704-9b51-4f4094dd46a3 service nova] Acquired lock "refresh_cache-e79a9eeb-a4c4-4613-bc43-4e40103addf9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.418679] env[63538]: DEBUG nova.network.neutron [req-8ec77283-1d56-440b-b20e-0382c0afbbce req-c28506d2-a232-4704-9b51-4f4094dd46a3 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Refreshing network info cache for port 2e8868e8-3746-43e8-906c-20e0cd0e7336 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 888.420377] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208ebcb7-91fe-4ae3-87f6-8a97ef0aa5e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.435329] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8a7f9f-b5fe-4405-8da0-c555dbada78c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.451428] env[63538]: DEBUG nova.compute.provider_tree [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.464519] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c8a725d-f571-4b3b-a0a5-b1aea41c2b23 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "edc670dd-732a-4c54-924c-c99ee539d4d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.636s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.612341] env[63538]: DEBUG oslo_vmware.api [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101127, 'name': PowerOffVM_Task, 'duration_secs': 0.299859} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.612836] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 888.613091] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 888.613481] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c10872b4-bae3-4319-bf3f-e4e4a436c514 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.702104] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 888.702538] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 888.702864] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleting the datastore file [datastore2] 8fb62f47-cbf2-4b46-bc33-845e832f9ef0 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 888.703397] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-630b16ce-c536-4674-92d8-dec1f52e0a73 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.714049] env[63538]: DEBUG oslo_vmware.api [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 888.714049] env[63538]: value = "task-5101129" [ 888.714049] 
env[63538]: _type = "Task" [ 888.714049] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.724456] env[63538]: DEBUG oslo_vmware.api [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101129, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.764477] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: db5993ce-6982-4b82-8f5d-3fe51df8896b] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 888.771608] env[63538]: DEBUG oslo_concurrency.lockutils [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "ade3cce6-5662-4199-96f4-398436f840d8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.771874] env[63538]: DEBUG oslo_concurrency.lockutils [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "ade3cce6-5662-4199-96f4-398436f840d8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.772108] env[63538]: DEBUG oslo_concurrency.lockutils [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "ade3cce6-5662-4199-96f4-398436f840d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.772302] env[63538]: DEBUG oslo_concurrency.lockutils [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "ade3cce6-5662-4199-96f4-398436f840d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.772476] env[63538]: DEBUG oslo_concurrency.lockutils [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "ade3cce6-5662-4199-96f4-398436f840d8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.774549] env[63538]: INFO nova.compute.manager [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Terminating instance [ 888.776858] env[63538]: DEBUG nova.compute.manager [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Start destroying the 
instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 888.777105] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 888.777948] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d170ba-fe83-4e04-816c-54a8a62d8fa2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.786725] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 888.787442] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b685762a-b903-4cf4-9b45-06508ad6ab79 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.816390] env[63538]: DEBUG nova.compute.manager [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 888.845517] env[63538]: DEBUG nova.virt.hardware [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 888.845517] env[63538]: DEBUG nova.virt.hardware [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 888.845929] env[63538]: DEBUG nova.virt.hardware [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.845997] env[63538]: DEBUG nova.virt.hardware [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 
tempest-InstanceActionsTestJSON-835482681-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 888.846202] env[63538]: DEBUG nova.virt.hardware [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.846456] env[63538]: DEBUG nova.virt.hardware [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 888.846774] env[63538]: DEBUG nova.virt.hardware [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 888.847037] env[63538]: DEBUG nova.virt.hardware [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 888.847288] env[63538]: DEBUG nova.virt.hardware [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 888.847497] env[63538]: DEBUG nova.virt.hardware [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 888.847760] env[63538]: DEBUG nova.virt.hardware [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 888.848726] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a0aeaf-6623-4258-b8b4-20a0a37cdac0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.861232] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c793a5d-7c0c-4751-bc95-378d043c50da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.869073] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 888.869539] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None 
req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 888.869874] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleting the datastore file [datastore2] ade3cce6-5662-4199-96f4-398436f840d8 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 888.870821] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcff54f2-a614-42e0-889d-ca68ed41fcc2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.890224] env[63538]: DEBUG oslo_vmware.api [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 888.890224] env[63538]: value = "task-5101131" [ 888.890224] env[63538]: _type = "Task" [ 888.890224] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.904387] env[63538]: DEBUG oslo_vmware.api [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101131, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.955417] env[63538]: DEBUG nova.scheduler.client.report [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 888.968987] env[63538]: DEBUG nova.compute.manager [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 889.024748] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e17ba593-10d3-4f5a-badf-20060b38f7a3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "2ab79158-dd1a-482e-9f82-9c64104e9076" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.025920] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e17ba593-10d3-4f5a-badf-20060b38f7a3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "2ab79158-dd1a-482e-9f82-9c64104e9076" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.133307] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "interface-d967631f-5c8a-42d8-ac05-4cec3bdb55cf-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.133686] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-d967631f-5c8a-42d8-ac05-4cec3bdb55cf-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.134196] env[63538]: DEBUG nova.objects.instance [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'flavor' on Instance uuid d967631f-5c8a-42d8-ac05-4cec3bdb55cf {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.226020] env[63538]: DEBUG oslo_vmware.api [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101129, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31908} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.226366] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 889.226590] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 889.226844] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 889.227047] env[63538]: INFO nova.compute.manager [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Took 1.16 seconds to destroy the instance on the hypervisor. [ 889.227302] env[63538]: DEBUG oslo.service.loopingcall [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 889.229911] env[63538]: DEBUG nova.compute.manager [-] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 889.229994] env[63538]: DEBUG nova.network.neutron [-] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 889.239730] env[63538]: DEBUG oslo_vmware.rw_handles [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d7356-f11b-e01c-bd93-cb77d9a60c8b/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 889.240871] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3dfe78f-8d42-4794-a1f5-fbed80ef9496 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.248784] env[63538]: DEBUG oslo_vmware.rw_handles [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d7356-f11b-e01c-bd93-cb77d9a60c8b/disk-0.vmdk is in state: ready. 
{{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 889.248974] env[63538]: ERROR oslo_vmware.rw_handles [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d7356-f11b-e01c-bd93-cb77d9a60c8b/disk-0.vmdk due to incomplete transfer. [ 889.250036] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-bfbf8b6a-710f-4bad-b19c-542fb2c8b64a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.258687] env[63538]: DEBUG oslo_vmware.rw_handles [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d7356-f11b-e01c-bd93-cb77d9a60c8b/disk-0.vmdk. {{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 889.258909] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Uploaded image 5d54543a-0c8e-4827-99bc-4a46030f7731 to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 889.260742] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 889.261032] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-83d26d49-302b-4140-b4ce-66ac21c80991 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.263482] env[63538]: DEBUG nova.network.neutron [req-8ec77283-1d56-440b-b20e-0382c0afbbce req-c28506d2-a232-4704-9b51-4f4094dd46a3 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Updated VIF entry in instance network info cache for port 2e8868e8-3746-43e8-906c-20e0cd0e7336. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 889.263831] env[63538]: DEBUG nova.network.neutron [req-8ec77283-1d56-440b-b20e-0382c0afbbce req-c28506d2-a232-4704-9b51-4f4094dd46a3 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Updating instance_info_cache with network_info: [{"id": "2e8868e8-3746-43e8-906c-20e0cd0e7336", "address": "fa:16:3e:4c:ad:86", "network": {"id": "8497b0d5-d275-4479-ae83-3ef4a1bb795b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-849919494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f6e933bf6c4e71af3b2a1e02d6e42f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e8868e8-37", "ovs_interfaceid": "2e8868e8-3746-43e8-906c-20e0cd0e7336", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.268594] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 04dc612b-7987-405b-9716-95c4ff3535ec] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 889.275022] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 889.275022] env[63538]: value = "task-5101132" [ 889.275022] env[63538]: _type = "Task" [ 889.275022] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.286181] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101132, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.402074] env[63538]: DEBUG oslo_vmware.api [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.426914} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.402507] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 889.402852] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 889.403150] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 889.403481] env[63538]: INFO nova.compute.manager [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Took 0.63 seconds to destroy the instance on the hypervisor. [ 889.403890] env[63538]: DEBUG oslo.service.loopingcall [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 889.404195] env[63538]: DEBUG nova.compute.manager [-] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 889.404351] env[63538]: DEBUG nova.network.neutron [-] [instance: ade3cce6-5662-4199-96f4-398436f840d8] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 889.462092] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.678s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.462679] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 889.466557] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.699s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.468628] env[63538]: INFO nova.compute.claims [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 889.500549] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.766672] env[63538]: DEBUG nova.objects.instance [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'pci_requests' on Instance uuid d967631f-5c8a-42d8-ac05-4cec3bdb55cf {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.768924] env[63538]: DEBUG oslo_concurrency.lockutils [req-8ec77283-1d56-440b-b20e-0382c0afbbce req-c28506d2-a232-4704-9b51-4f4094dd46a3 service nova] Releasing lock "refresh_cache-e79a9eeb-a4c4-4613-bc43-4e40103addf9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.774312] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e50e95c0-830b-4d71-999b-546b138bf8f4] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 889.795854] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101132, 'name': Destroy_Task} progress is 33%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.950909] env[63538]: DEBUG nova.network.neutron [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Successfully updated port: 2dc178e5-2d66-4747-ae40-c03f69eba8e8 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 889.974427] env[63538]: DEBUG nova.compute.utils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 889.980846] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 889.981184] env[63538]: DEBUG nova.network.neutron [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 890.052191] env[63538]: DEBUG nova.policy [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '577a52928adf4587b963772b31a378cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d463d24e41b421eb7cb9d51ad207495', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 890.255684] env[63538]: DEBUG nova.network.neutron [-] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.271079] env[63538]: DEBUG nova.objects.base [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 890.271174] env[63538]: DEBUG nova.network.neutron [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 890.285410] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 736b110e-7265-42cc-9c9b-35f57c466b0c] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 890.301075] env[63538]: DEBUG oslo_vmware.api [None 
req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101132, 'name': Destroy_Task, 'duration_secs': 0.789422} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.301075] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Destroyed the VM [ 890.301270] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 890.301786] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-655d1b6b-3054-40c8-9294-1709660af65f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.311219] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 890.311219] env[63538]: value = "task-5101133" [ 890.311219] env[63538]: _type = "Task" [ 890.311219] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.325363] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101133, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.343969] env[63538]: DEBUG nova.network.neutron [-] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.435748] env[63538]: DEBUG nova.policy [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ebe77c0e32ce4a32b290ba5088e107f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7063c42297c24f2baf7271fa25dec927', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 890.454749] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquiring lock "refresh_cache-b0b4ae9c-95d3-47a1-86a7-120c88b60704" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.454886] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquired lock "refresh_cache-b0b4ae9c-95d3-47a1-86a7-120c88b60704" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.455662] env[63538]: DEBUG nova.network.neutron [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 890.481643] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 890.747080] env[63538]: DEBUG nova.network.neutron [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Successfully created port: ed76faed-4b61-4cd6-833c-46bbb80f49e3 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 890.758842] env[63538]: INFO nova.compute.manager [-] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Took 1.35 seconds to deallocate network for instance. 
[ 890.790164] env[63538]: DEBUG nova.compute.manager [req-6725d6a6-9a2b-4fc4-97c8-bbaa3cc5421b req-a4c79c91-d9b2-45c0-844d-a994a50870a9 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Received event network-changed-2e8868e8-3746-43e8-906c-20e0cd0e7336 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 890.790482] env[63538]: DEBUG nova.compute.manager [req-6725d6a6-9a2b-4fc4-97c8-bbaa3cc5421b req-a4c79c91-d9b2-45c0-844d-a994a50870a9 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Refreshing instance network info cache due to event network-changed-2e8868e8-3746-43e8-906c-20e0cd0e7336. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 890.790693] env[63538]: DEBUG oslo_concurrency.lockutils [req-6725d6a6-9a2b-4fc4-97c8-bbaa3cc5421b req-a4c79c91-d9b2-45c0-844d-a994a50870a9 service nova] Acquiring lock "refresh_cache-e79a9eeb-a4c4-4613-bc43-4e40103addf9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.790843] env[63538]: DEBUG oslo_concurrency.lockutils [req-6725d6a6-9a2b-4fc4-97c8-bbaa3cc5421b req-a4c79c91-d9b2-45c0-844d-a994a50870a9 service nova] Acquired lock "refresh_cache-e79a9eeb-a4c4-4613-bc43-4e40103addf9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.791020] env[63538]: DEBUG nova.network.neutron [req-6725d6a6-9a2b-4fc4-97c8-bbaa3cc5421b req-a4c79c91-d9b2-45c0-844d-a994a50870a9 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Refreshing network info cache for port 2e8868e8-3746-43e8-906c-20e0cd0e7336 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 890.801805] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 4e89aa25-fb4a-430d-ab87-feff57b73780] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 890.830223] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101133, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.846653] env[63538]: INFO nova.compute.manager [-] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Took 1.61 seconds to deallocate network for instance. 
[ 890.953976] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "90e56075-0d77-467f-90be-913315b63b33" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.953976] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "90e56075-0d77-467f-90be-913315b63b33" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.007149] env[63538]: DEBUG nova.network.neutron [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 891.089530] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7146f4a-5308-4c8b-a3ef-cac08742d55f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.104292] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb34e27-de41-435d-98a8-89647a6c4b9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.145106] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd67a3e-4c3f-4318-8c99-8f45c041c24c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.155090] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cf37d3-7d14-4b87-a7d0-f93359b31ebc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.171124] env[63538]: DEBUG nova.compute.provider_tree [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 891.186482] env[63538]: DEBUG nova.network.neutron [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Updating instance_info_cache with network_info: [{"id": "2dc178e5-2d66-4747-ae40-c03f69eba8e8", "address": "fa:16:3e:58:4d:69", "network": {"id": "9c8a7880-0663-49c8-8cc7-df517ad2ab08", 
"bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1858496628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fae9c04cde44afbb9a8295910faf2dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dc178e5-2d", "ovs_interfaceid": "2dc178e5-2d66-4747-ae40-c03f69eba8e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.233708] env[63538]: DEBUG nova.network.neutron [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Successfully created port: 4c96df2c-ae17-49fe-84c1-d86dd4b46eb6 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 891.267320] env[63538]: DEBUG oslo_concurrency.lockutils [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.308533] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: bb56950a-3e25-4fb9-9f84-f735e26adc42] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 891.325131] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101133, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.352579] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.496286] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 891.523738] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 891.523990] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 891.524171] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 891.524363] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 891.524514] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 891.524712] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 891.524982] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 891.525188] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 891.525404] env[63538]: DEBUG nova.virt.hardware [None 
req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 891.525606] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 891.525814] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 891.529175] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6039eaf-079e-435a-8958-b386b71e5cfa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.536728] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528748b3-7b11-4d84-9991-ea94a3137cd4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.572969] env[63538]: DEBUG nova.network.neutron [req-6725d6a6-9a2b-4fc4-97c8-bbaa3cc5421b req-a4c79c91-d9b2-45c0-844d-a994a50870a9 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Updated VIF entry in instance network info cache for port 2e8868e8-3746-43e8-906c-20e0cd0e7336. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 891.573380] env[63538]: DEBUG nova.network.neutron [req-6725d6a6-9a2b-4fc4-97c8-bbaa3cc5421b req-a4c79c91-d9b2-45c0-844d-a994a50870a9 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Updating instance_info_cache with network_info: [{"id": "2e8868e8-3746-43e8-906c-20e0cd0e7336", "address": "fa:16:3e:4c:ad:86", "network": {"id": "8497b0d5-d275-4479-ae83-3ef4a1bb795b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-849919494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f6e933bf6c4e71af3b2a1e02d6e42f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e8868e8-37", "ovs_interfaceid": "2e8868e8-3746-43e8-906c-20e0cd0e7336", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.691643] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Releasing lock "refresh_cache-b0b4ae9c-95d3-47a1-86a7-120c88b60704" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.692008] env[63538]: DEBUG nova.compute.manager [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Instance network_info: |[{"id": "2dc178e5-2d66-4747-ae40-c03f69eba8e8", "address": "fa:16:3e:58:4d:69", "network": {"id": "9c8a7880-0663-49c8-8cc7-df517ad2ab08", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1858496628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fae9c04cde44afbb9a8295910faf2dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dc178e5-2d", "ovs_interfaceid": "2dc178e5-2d66-4747-ae40-c03f69eba8e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 891.692481] env[63538]: 
DEBUG nova.virt.vmwareapi.vmops [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:4d:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca50cd14-9e1f-4d74-a066-e5a45ba0ce22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2dc178e5-2d66-4747-ae40-c03f69eba8e8', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 891.700099] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Creating folder: Project (9fae9c04cde44afbb9a8295910faf2dc). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 891.701268] env[63538]: ERROR nova.scheduler.client.report [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [req-297d72f5-d8d1-4056-a89e-8e011d2e67ca] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-297d72f5-d8d1-4056-a89e-8e011d2e67ca"}]} [ 891.701623] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a27b4295-fb67-4616-812d-205776eaf431 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.717375] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Created folder: Project (9fae9c04cde44afbb9a8295910faf2dc) in parent group-v992234. [ 891.717656] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Creating folder: Instances. Parent ref: group-v992421. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 891.717850] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43d1cce1-3b0c-405a-9297-c14ad3e8584f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.720652] env[63538]: DEBUG nova.scheduler.client.report [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 891.735405] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Created folder: Instances in parent group-v992421. [ 891.735660] env[63538]: DEBUG oslo.service.loopingcall [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 891.735969] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 891.736062] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2082e83a-296d-40d6-9e16-806ce3049192 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.751580] env[63538]: DEBUG nova.scheduler.client.report [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 891.751807] env[63538]: DEBUG nova.compute.provider_tree [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 891.760030] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 891.760030] env[63538]: value = "task-5101136" [ 891.760030] env[63538]: _type = "Task" [ 891.760030] env[63538]: } to 
complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.765179] env[63538]: DEBUG nova.scheduler.client.report [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 891.771027] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101136, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.786388] env[63538]: DEBUG nova.scheduler.client.report [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 891.811824] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 080b11d7-a756-45a0-81d5-b5fcc2662ac9] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 891.826383] env[63538]: DEBUG oslo_vmware.api [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101133, 'name': RemoveSnapshot_Task, 'duration_secs': 1.232434} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.826383] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 891.826383] env[63538]: INFO nova.compute.manager [None req-d403988c-1fd9-408a-85d9-94a8421dafc4 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Took 14.71 seconds to snapshot the instance on the hypervisor. 
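The 409 'placement.concurrent_update' error above, followed by 'Refreshing inventories ...' and the later generation bump from 97 to 98, is Placement's optimistic concurrency control: each inventory PUT carries the resource provider generation, and a stale generation forces the client to re-read the provider and retry. A simplified sketch of that retry loop against the Placement HTTP API; the endpoint URL and token are hypothetical, and the real client goes through keystoneauth sessions rather than raw requests:

    import requests

    PLACEMENT = 'http://placement.example/placement'   # hypothetical endpoint
    HEADERS = {'X-Auth-Token': 'ADMIN_TOKEN',           # placeholder credentials
               'OpenStack-API-Version': 'placement 1.26'}

    def set_inventory(provider_uuid, inventories, retries=3):
        """PUT inventories, retrying when the provider generation is stale (409)."""
        for _ in range(retries):
            # Re-read the provider to pick up its current generation.
            rp = requests.get('%s/resource_providers/%s' % (PLACEMENT, provider_uuid),
                              headers=HEADERS).json()
            resp = requests.put(
                '%s/resource_providers/%s/inventories' % (PLACEMENT, provider_uuid),
                headers=HEADERS,
                json={'resource_provider_generation': rp['generation'],
                      'inventories': inventories})
            if resp.status_code != 409:
                return resp                  # success, or a non-retryable error
            # 409 placement.concurrent_update: another writer bumped the
            # generation first; loop, refresh it, and try again.
        return resp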
[ 892.077409] env[63538]: DEBUG oslo_concurrency.lockutils [req-6725d6a6-9a2b-4fc4-97c8-bbaa3cc5421b req-a4c79c91-d9b2-45c0-844d-a994a50870a9 service nova] Releasing lock "refresh_cache-e79a9eeb-a4c4-4613-bc43-4e40103addf9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.221664] env[63538]: DEBUG nova.compute.manager [req-cdf245da-6519-490c-896b-27d796ffd32b req-a1fe6f0b-6615-40f5-892b-44f96ef202ac service nova] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Received event network-vif-deleted-a4df30fe-8921-4a26-89d6-e478ba9e2608 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 892.240225] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d222e8e1-2745-485e-87a2-e79079ee9541 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.252639] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d837529-18e2-4501-94ef-cc54f7ddae46 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.304584] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1b0467-dc48-49e5-b68b-642faba65051 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.310824] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101136, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.317578] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bbfd2fc-6af3-4d92-acec-baa0af9663e5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.323853] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 6f29f063-ddb5-491a-a1a0-7c9ed65a1718] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 892.340082] env[63538]: DEBUG nova.compute.provider_tree [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 892.771333] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101136, 'name': CreateVM_Task, 'duration_secs': 0.596102} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.771658] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 892.772230] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.772479] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.772750] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 892.773114] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-451c3c22-7984-4598-bd15-a5268c01e7cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.778137] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Waiting for the task: (returnval){ [ 892.778137] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52867b1c-cc3a-3c53-9829-fcc5753a430f" [ 892.778137] env[63538]: _type = "Task" [ 892.778137] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.786789] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52867b1c-cc3a-3c53-9829-fcc5753a430f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.827224] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: fd650fdc-6b49-4051-8267-bbd1f0cb86f1] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 892.879666] env[63538]: DEBUG nova.scheduler.client.report [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 97 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 892.880083] env[63538]: DEBUG nova.compute.provider_tree [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 97 to 98 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 892.880303] env[63538]: DEBUG nova.compute.provider_tree [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 893.051956] env[63538]: DEBUG nova.network.neutron [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Successfully updated port: ed76faed-4b61-4cd6-833c-46bbb80f49e3 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 893.234628] env[63538]: DEBUG nova.network.neutron [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Successfully updated port: 4c96df2c-ae17-49fe-84c1-d86dd4b46eb6 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 893.289732] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52867b1c-cc3a-3c53-9829-fcc5753a430f, 'name': SearchDatastore_Task, 'duration_secs': 0.01095} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.290121] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.290388] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.290637] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.290791] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.291051] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.291363] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5effbe4-918b-4241-8d6c-fd3dff6c7dd1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.303552] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.303646] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 893.304582] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9e3372c-862b-4749-bd73-01125e4a67c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.312589] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Waiting for the task: (returnval){ [ 893.312589] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e922a5-67c2-d128-3229-792a004474c5" [ 893.312589] env[63538]: _type = "Task" [ 893.312589] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.325901] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e922a5-67c2-d128-3229-792a004474c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.330903] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 1a4309cf-7d02-4351-8fc2-7ce2bc0cb1f7] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 893.385436] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.919s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.385998] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 893.388835] env[63538]: DEBUG oslo_concurrency.lockutils [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.631s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.389057] env[63538]: DEBUG oslo_concurrency.lockutils [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.392800] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.390s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.393948] env[63538]: INFO nova.compute.claims [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 893.418602] env[63538]: INFO nova.scheduler.client.report [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Deleted allocations for instance f9fa5578-acf3-416f-9cb0-8ceb00e5132d [ 893.554729] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "refresh_cache-1db1d558-2473-49cb-b309-f7192bd6b9c1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.554729] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired lock "refresh_cache-1db1d558-2473-49cb-b309-f7192bd6b9c1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.554911] env[63538]: DEBUG nova.network.neutron [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 893.737041] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.737181] env[63538]: DEBUG oslo_concurrency.lockutils [None 
req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.737296] env[63538]: DEBUG nova.network.neutron [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 893.824064] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e922a5-67c2-d128-3229-792a004474c5, 'name': SearchDatastore_Task, 'duration_secs': 0.012695} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.824879] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d504763-0816-4f78-aac5-a7bee22ae0ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.831946] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Waiting for the task: (returnval){ [ 893.831946] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526140c1-814e-b97e-7195-2cb2d79bef46" [ 893.831946] env[63538]: _type = "Task" [ 893.831946] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.835471] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 61068d41-5f5d-4ee5-b546-71da13eff93d] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 893.843518] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526140c1-814e-b97e-7195-2cb2d79bef46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.902210] env[63538]: DEBUG nova.compute.utils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 893.903703] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 893.904518] env[63538]: DEBUG nova.network.neutron [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 893.930017] env[63538]: DEBUG oslo_concurrency.lockutils [None req-184ea41e-e2f4-410d-8eca-6d73d991f50c tempest-ServersNegativeTestJSON-811223485 tempest-ServersNegativeTestJSON-811223485-project-member] Lock "f9fa5578-acf3-416f-9cb0-8ceb00e5132d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.375s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.992058] env[63538]: DEBUG nova.policy [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '577a52928adf4587b963772b31a378cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d463d24e41b421eb7cb9d51ad207495', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 894.113329] env[63538]: DEBUG nova.network.neutron [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 894.126498] env[63538]: DEBUG nova.compute.manager [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Received event network-vif-plugged-2dc178e5-2d66-4747-ae40-c03f69eba8e8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 894.126728] env[63538]: DEBUG oslo_concurrency.lockutils [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] Acquiring lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.126946] env[63538]: DEBUG oslo_concurrency.lockutils [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] Lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.127178] env[63538]: DEBUG oslo_concurrency.lockutils [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] Lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.127360] env[63538]: DEBUG nova.compute.manager [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] No waiting events found dispatching network-vif-plugged-2dc178e5-2d66-4747-ae40-c03f69eba8e8 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 894.127593] env[63538]: WARNING nova.compute.manager [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Received unexpected event network-vif-plugged-2dc178e5-2d66-4747-ae40-c03f69eba8e8 for instance with vm_state building and task_state spawning. [ 894.127747] env[63538]: DEBUG nova.compute.manager [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Received event network-vif-deleted-5b823d56-3162-4875-a202-4526e8d9e433 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 894.127930] env[63538]: DEBUG nova.compute.manager [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Received event network-changed-2dc178e5-2d66-4747-ae40-c03f69eba8e8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 894.128093] env[63538]: DEBUG nova.compute.manager [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Refreshing instance network info cache due to event network-changed-2dc178e5-2d66-4747-ae40-c03f69eba8e8. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 894.128290] env[63538]: DEBUG oslo_concurrency.lockutils [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] Acquiring lock "refresh_cache-b0b4ae9c-95d3-47a1-86a7-120c88b60704" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.128901] env[63538]: DEBUG oslo_concurrency.lockutils [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] Acquired lock "refresh_cache-b0b4ae9c-95d3-47a1-86a7-120c88b60704" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.128901] env[63538]: DEBUG nova.network.neutron [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Refreshing network info cache for port 2dc178e5-2d66-4747-ae40-c03f69eba8e8 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 894.327011] env[63538]: WARNING nova.network.neutron [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] 44a04a43-a979-4648-89b2-63323df5b0f3 already exists in list: networks containing: ['44a04a43-a979-4648-89b2-63323df5b0f3']. ignoring it [ 894.340110] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 99de5226-a27c-47c5-90fa-5f0c7204df1c] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 894.347808] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526140c1-814e-b97e-7195-2cb2d79bef46, 'name': SearchDatastore_Task, 'duration_secs': 0.01523} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.349829] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.349829] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] b0b4ae9c-95d3-47a1-86a7-120c88b60704/b0b4ae9c-95d3-47a1-86a7-120c88b60704.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 894.351049] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb4ccda8-62a0-42bc-8945-1b6b3c173cb2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.362030] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Waiting for the task: (returnval){ [ 894.362030] env[63538]: value = "task-5101137" [ 894.362030] env[63538]: _type = "Task" [ 894.362030] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.374282] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101137, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.408208] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 894.418322] env[63538]: DEBUG nova.network.neutron [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Updating instance_info_cache with network_info: [{"id": "ed76faed-4b61-4cd6-833c-46bbb80f49e3", "address": "fa:16:3e:71:77:a9", "network": {"id": "106e253d-ba43-4eb6-a423-ed6a6d4156aa", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-330603874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d463d24e41b421eb7cb9d51ad207495", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped76faed-4b", "ovs_interfaceid": "ed76faed-4b61-4cd6-833c-46bbb80f49e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.554461] env[63538]: DEBUG nova.network.neutron [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Successfully created port: e650c787-b98c-4e3d-aa14-1a81d82c2c8b {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 894.673627] env[63538]: DEBUG nova.compute.manager [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 894.674615] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01237478-96c6-498e-a9f4-f5f56d0ce521 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.849761] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 4b95b9dd-63ce-4285-ae28-f0c2a7fe6eff] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 894.881956] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101137, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.921792] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Releasing lock "refresh_cache-1db1d558-2473-49cb-b309-f7192bd6b9c1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.922300] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Instance network_info: |[{"id": "ed76faed-4b61-4cd6-833c-46bbb80f49e3", "address": "fa:16:3e:71:77:a9", "network": {"id": "106e253d-ba43-4eb6-a423-ed6a6d4156aa", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-330603874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d463d24e41b421eb7cb9d51ad207495", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped76faed-4b", "ovs_interfaceid": "ed76faed-4b61-4cd6-833c-46bbb80f49e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 894.923250] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:77:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bec903a9-d773-4d7c-a80c-c2533be346fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ed76faed-4b61-4cd6-833c-46bbb80f49e3', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 894.933437] env[63538]: DEBUG oslo.service.loopingcall [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 894.934813] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 894.934913] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41338e65-2723-4892-8a4c-eb3350831dd7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.963325] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 894.963325] env[63538]: value = "task-5101138" [ 894.963325] env[63538]: _type = "Task" [ 894.963325] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.977685] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101138, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.995650] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3423ada-106b-48d2-925a-ad05366c851e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.005240] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cdfc02-fd59-467c-9780-bb7b042bad79 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.013197] env[63538]: DEBUG nova.network.neutron [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Updating instance_info_cache with network_info: [{"id": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "address": "fa:16:3e:60:55:b3", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap696fc25d-fa", "ovs_interfaceid": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4c96df2c-ae17-49fe-84c1-d86dd4b46eb6", "address": "fa:16:3e:1e:f2:d9", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c96df2c-ae", "ovs_interfaceid": "4c96df2c-ae17-49fe-84c1-d86dd4b46eb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.050552] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.050979] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.051299] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.052693] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973a8bf5-622e-44ab-8365-1805a7aa073f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.057847] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000d04c9-725a-48ff-84ce-527fb5865ed3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.096516] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f54d7b-fe64-4ec2-984e-11bbbcb2ec25 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.104400] env[63538]: DEBUG nova.virt.hardware [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 895.104839] env[63538]: DEBUG nova.virt.hardware [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 895.105131] env[63538]: DEBUG nova.virt.hardware [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.105455] env[63538]: DEBUG nova.virt.hardware [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 895.105726] env[63538]: DEBUG nova.virt.hardware [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.106011] env[63538]: DEBUG nova.virt.hardware [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 895.106710] env[63538]: DEBUG nova.virt.hardware [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 895.107033] env[63538]: DEBUG nova.virt.hardware [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 895.107504] env[63538]: DEBUG nova.virt.hardware [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 895.107664] env[63538]: DEBUG nova.virt.hardware [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 895.111026] env[63538]: DEBUG nova.virt.hardware [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Sorted 
desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 895.119027] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Reconfiguring VM to attach interface {{(pid=63538) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 895.119027] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60a1980f-15e8-4627-86f4-6b11ada23876 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.141307] env[63538]: DEBUG nova.compute.provider_tree [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.143965] env[63538]: DEBUG oslo_vmware.api [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 895.143965] env[63538]: value = "task-5101139" [ 895.143965] env[63538]: _type = "Task" [ 895.143965] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.150198] env[63538]: DEBUG nova.network.neutron [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Updated VIF entry in instance network info cache for port 2dc178e5-2d66-4747-ae40-c03f69eba8e8. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 895.150198] env[63538]: DEBUG nova.network.neutron [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Updating instance_info_cache with network_info: [{"id": "2dc178e5-2d66-4747-ae40-c03f69eba8e8", "address": "fa:16:3e:58:4d:69", "network": {"id": "9c8a7880-0663-49c8-8cc7-df517ad2ab08", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1858496628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fae9c04cde44afbb9a8295910faf2dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dc178e5-2d", "ovs_interfaceid": "2dc178e5-2d66-4747-ae40-c03f69eba8e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.153662] env[63538]: DEBUG oslo_vmware.api [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101139, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.198349] env[63538]: INFO nova.compute.manager [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] instance snapshotting [ 895.203018] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98e0aa5-6d14-4bcb-a095-34684e4e9ed1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.223664] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7c3152-fb7f-4ac8-afdd-ad20acb12b0d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.353263] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: a7bb1869-5553-40d8-9c0b-366ccdef5fae] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 895.380426] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "2e97b357-0200-4aed-9705-dd7808f853ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.380702] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "2e97b357-0200-4aed-9705-dd7808f853ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.381038] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101137, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553476} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.382014] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] b0b4ae9c-95d3-47a1-86a7-120c88b60704/b0b4ae9c-95d3-47a1-86a7-120c88b60704.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 895.382014] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 895.382014] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d40d36c1-dc0e-41ec-84ab-75d6e2516d2e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.392191] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Waiting for the task: (returnval){ [ 895.392191] env[63538]: value = "task-5101140" [ 895.392191] env[63538]: _type = "Task" [ 895.392191] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.404338] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101140, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.425106] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 895.477591] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 895.477781] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 895.477907] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.478115] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 895.478396] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.478621] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 895.478874] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 895.479089] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 895.479305] env[63538]: DEBUG nova.virt.hardware [None 
req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 895.479825] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 895.480119] env[63538]: DEBUG nova.virt.hardware [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 895.481289] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c1bdea-fcdb-4a89-bebc-5b446948f4d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.491109] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101138, 'name': CreateVM_Task, 'duration_secs': 0.526634} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.493661] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 895.494810] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.494995] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.495359] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 895.496876] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-308aeec6-9e59-4c53-b4d6-32842a5a73a1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.502567] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da4492de-b058-4784-8f3c-7d35ae3acded {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.510384] env[63538]: DEBUG oslo_vmware.api [None 
req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 895.510384] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526e6eea-0e42-3a29-4940-c728a9f28081" [ 895.510384] env[63538]: _type = "Task" [ 895.510384] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.529812] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526e6eea-0e42-3a29-4940-c728a9f28081, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.647538] env[63538]: DEBUG nova.scheduler.client.report [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 895.654610] env[63538]: DEBUG oslo_concurrency.lockutils [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] Releasing lock "refresh_cache-b0b4ae9c-95d3-47a1-86a7-120c88b60704" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.656111] env[63538]: DEBUG nova.compute.manager [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Received event network-vif-plugged-ed76faed-4b61-4cd6-833c-46bbb80f49e3 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 895.656111] env[63538]: DEBUG oslo_concurrency.lockutils [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] Acquiring lock "1db1d558-2473-49cb-b309-f7192bd6b9c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.656111] env[63538]: DEBUG oslo_concurrency.lockutils [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] Lock "1db1d558-2473-49cb-b309-f7192bd6b9c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.656111] env[63538]: DEBUG oslo_concurrency.lockutils [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] Lock "1db1d558-2473-49cb-b309-f7192bd6b9c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.656111] env[63538]: DEBUG nova.compute.manager [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] No waiting events found dispatching network-vif-plugged-ed76faed-4b61-4cd6-833c-46bbb80f49e3 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 895.656111] env[63538]: WARNING nova.compute.manager [req-a0471025-6105-43e1-ada6-22ef8489058b req-da03c996-f129-4aed-985d-e2753bd63653 service nova] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Received unexpected event network-vif-plugged-ed76faed-4b61-4cd6-833c-46bbb80f49e3 for instance with vm_state building and task_state spawning. [ 895.661196] env[63538]: DEBUG oslo_vmware.api [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101139, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.736332] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 895.736698] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ff97fd75-ad41-4b6a-b56e-5a0c6cfd850f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.745828] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 895.745828] env[63538]: value = "task-5101141" [ 895.745828] env[63538]: _type = "Task" [ 895.745828] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.755994] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101141, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.858190] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 43729260-d138-4e62-9cc5-4db3ca39f5d2] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 895.907918] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101140, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116154} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.908461] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 895.909741] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e45f06-7ab9-49aa-8ef3-4c40ce9fa348 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.950355] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] b0b4ae9c-95d3-47a1-86a7-120c88b60704/b0b4ae9c-95d3-47a1-86a7-120c88b60704.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.950355] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0864ea7-0f55-4aca-ae46-72fd95081248 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.976750] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Waiting for the task: (returnval){ [ 895.976750] env[63538]: value = "task-5101142" [ 895.976750] env[63538]: _type = "Task" [ 895.976750] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.985685] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101142, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.034026] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526e6eea-0e42-3a29-4940-c728a9f28081, 'name': SearchDatastore_Task, 'duration_secs': 0.024522} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.034026] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.034026] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 896.034026] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.034026] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.034467] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 896.035158] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d08a1b47-701b-41c5-923b-84cecea5610d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.049048] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 896.049048] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 896.049048] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f6313d8-81d1-4af9-93f5-82b190bded4a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.055502] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 896.055502] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520ccf66-c951-cc48-0477-6413f6fc1b43" [ 896.055502] env[63538]: _type = "Task" [ 896.055502] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.066490] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520ccf66-c951-cc48-0477-6413f6fc1b43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.158022] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.763s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.158022] env[63538]: DEBUG nova.compute.manager [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 896.159713] env[63538]: DEBUG oslo_concurrency.lockutils [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.489s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.160056] env[63538]: DEBUG oslo_concurrency.lockutils [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.162953] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.769s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.165034] env[63538]: INFO nova.compute.claims [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.174748] env[63538]: DEBUG oslo_vmware.api [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101139, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.208758] env[63538]: INFO nova.scheduler.client.report [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Deleted allocations for instance 0e718984-cfce-4620-9be6-fdcfb4954da8 [ 896.264026] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101141, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.363467] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 65fc18ff-8901-40d2-8a5b-640eb9768240] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 896.485689] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101142, 'name': ReconfigVM_Task, 'duration_secs': 0.45946} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.486012] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Reconfigured VM instance instance-00000044 to attach disk [datastore1] b0b4ae9c-95d3-47a1-86a7-120c88b60704/b0b4ae9c-95d3-47a1-86a7-120c88b60704.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 896.486657] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-731deeae-3fc3-4922-8e82-ebdc5bff2bc0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.495085] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Waiting for the task: (returnval){ [ 896.495085] env[63538]: value = "task-5101143" [ 896.495085] env[63538]: _type = "Task" [ 896.495085] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.507264] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101143, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.566987] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520ccf66-c951-cc48-0477-6413f6fc1b43, 'name': SearchDatastore_Task, 'duration_secs': 0.015501} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.567853] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2773a8ca-3701-4386-ae86-e88ce3c82660 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.573806] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 896.573806] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527bd475-d3aa-c7b3-3002-214e8a5e206f" [ 896.573806] env[63538]: _type = "Task" [ 896.573806] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.583139] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527bd475-d3aa-c7b3-3002-214e8a5e206f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.665167] env[63538]: DEBUG oslo_vmware.api [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101139, 'name': ReconfigVM_Task, 'duration_secs': 1.295996} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.665886] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.666146] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Reconfigured VM to attach interface {{(pid=63538) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 896.670464] env[63538]: DEBUG nova.compute.utils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 896.674274] env[63538]: DEBUG nova.compute.manager [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 896.674450] env[63538]: DEBUG nova.network.neutron [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 896.690209] env[63538]: DEBUG nova.network.neutron [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Successfully updated port: e650c787-b98c-4e3d-aa14-1a81d82c2c8b {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.718157] env[63538]: DEBUG oslo_concurrency.lockutils [None req-336ab733-d8cf-4d95-92d5-cd410fd6ae6d tempest-ServerPasswordTestJSON-52970672 tempest-ServerPasswordTestJSON-52970672-project-member] Lock "0e718984-cfce-4620-9be6-fdcfb4954da8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.602s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.752843] env[63538]: DEBUG nova.policy [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '29408f9a52d44cbc8eaa4e3b425b475a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1fe11c1386b14d139f4416cbf20fb201', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 896.760711] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101141, 'name': CreateSnapshot_Task, 'duration_secs': 0.728413} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.761432] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 896.761863] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6754d1bd-05ea-4ba7-88d0-9c58812e8204 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.869528] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 5421e135-9581-4f81-aa8a-2a604887a1df] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 897.008738] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101143, 'name': Rename_Task, 'duration_secs': 0.211011} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.009123] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 897.009300] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60791097-7f08-42c5-a48b-d97282ac6c16 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.017966] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Waiting for the task: (returnval){ [ 897.017966] env[63538]: value = "task-5101144" [ 897.017966] env[63538]: _type = "Task" [ 897.017966] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.033370] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101144, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.089077] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527bd475-d3aa-c7b3-3002-214e8a5e206f, 'name': SearchDatastore_Task, 'duration_secs': 0.009957} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.089583] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.090146] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 1db1d558-2473-49cb-b309-f7192bd6b9c1/1db1d558-2473-49cb-b309-f7192bd6b9c1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 897.090588] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd209f26-00c9-4fd6-806a-1ce830c0cc96 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.099706] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 897.099706] env[63538]: value = "task-5101145" [ 897.099706] env[63538]: _type = "Task" [ 897.099706] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.111264] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101145, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.172327] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fe62c96-a99d-4b15-8dbc-c26b01bd896e tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-d967631f-5c8a-42d8-ac05-4cec3bdb55cf-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.038s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.178465] env[63538]: DEBUG nova.compute.manager [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 897.195770] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "refresh_cache-fa8ed101-914d-4751-ab9b-f68ad5da7a56" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.195948] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired lock "refresh_cache-fa8ed101-914d-4751-ab9b-f68ad5da7a56" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.196139] env[63538]: DEBUG nova.network.neutron [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 897.241971] env[63538]: DEBUG nova.network.neutron [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Successfully created port: 8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 897.284768] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 897.288374] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-49151f75-b726-43a4-94d1-5ac5bed31361 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.300170] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 897.300170] env[63538]: value = "task-5101146" [ 897.300170] env[63538]: _type = "Task" [ 897.300170] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.313127] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101146, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.371436] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: ee9fe572-7a17-46db-8330-4b6f632c6b2c] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 897.488983] env[63538]: DEBUG nova.compute.manager [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 897.490402] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d94ceb7-d3c4-4469-92b6-cd7675fd917d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.530544] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101144, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.615895] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101145, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.702180] env[63538]: DEBUG nova.compute.manager [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Received event network-changed-ed76faed-4b61-4cd6-833c-46bbb80f49e3 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 897.704066] env[63538]: DEBUG nova.compute.manager [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Refreshing instance network info cache due to event network-changed-ed76faed-4b61-4cd6-833c-46bbb80f49e3. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 897.704066] env[63538]: DEBUG oslo_concurrency.lockutils [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] Acquiring lock "refresh_cache-1db1d558-2473-49cb-b309-f7192bd6b9c1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.704066] env[63538]: DEBUG oslo_concurrency.lockutils [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] Acquired lock "refresh_cache-1db1d558-2473-49cb-b309-f7192bd6b9c1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.704066] env[63538]: DEBUG nova.network.neutron [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Refreshing network info cache for port ed76faed-4b61-4cd6-833c-46bbb80f49e3 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 897.768559] env[63538]: DEBUG nova.network.neutron [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 897.774774] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7005b46e-9d3f-42a0-aab5-a6eb27bba528 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.785238] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25183d5d-8e84-4693-8adf-d85e5e26c6b8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.829808] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7171e503-ae4e-4b74-abab-d6ee22a3f464 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.843714] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101146, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.846392] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5888210c-6357-4643-86e9-69371fa2bebd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.862516] env[63538]: DEBUG nova.compute.provider_tree [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 897.875202] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: b5593b74-fe89-43f5-a8c6-e73159b4efac] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 897.901890] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "d5d557c6-3d4e-4122-8756-218c9757fa01" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.904160] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.009751] env[63538]: INFO nova.compute.manager [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] instance snapshotting [ 898.013128] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8f0b2c-5793-4ddd-819c-43017b38e2d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.041575] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408887fc-cb1f-44c7-8b4d-3c3c20265071 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.045730] env[63538]: DEBUG nova.network.neutron [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Updating instance_info_cache with network_info: [{"id": "e650c787-b98c-4e3d-aa14-1a81d82c2c8b", "address": "fa:16:3e:88:c1:c3", "network": {"id": "106e253d-ba43-4eb6-a423-ed6a6d4156aa", "bridge": 
"br-int", "label": "tempest-MultipleCreateTestJSON-330603874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d463d24e41b421eb7cb9d51ad207495", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape650c787-b9", "ovs_interfaceid": "e650c787-b98c-4e3d-aa14-1a81d82c2c8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.050530] env[63538]: DEBUG oslo_vmware.api [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101144, 'name': PowerOnVM_Task, 'duration_secs': 0.91294} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.051725] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 898.052066] env[63538]: INFO nova.compute.manager [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Took 9.24 seconds to spawn the instance on the hypervisor. [ 898.052333] env[63538]: DEBUG nova.compute.manager [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 898.053264] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a2fefc-87c1-49a1-98e0-d34734e417f6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.115317] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101145, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.664074} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.115614] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 1db1d558-2473-49cb-b309-f7192bd6b9c1/1db1d558-2473-49cb-b309-f7192bd6b9c1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 898.115840] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 898.116153] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65ef069f-bde9-4d4a-9d48-08ae317e31f1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.126482] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 898.126482] env[63538]: value = "task-5101147" [ 898.126482] env[63538]: _type = "Task" [ 898.126482] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.137262] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101147, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.197331] env[63538]: DEBUG nova.compute.manager [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 898.229636] env[63538]: DEBUG nova.virt.hardware [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 898.229866] env[63538]: DEBUG nova.virt.hardware [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 898.230081] env[63538]: DEBUG nova.virt.hardware [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.230391] env[63538]: DEBUG nova.virt.hardware [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 898.230497] env[63538]: DEBUG nova.virt.hardware [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.230658] env[63538]: DEBUG nova.virt.hardware [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 898.230872] env[63538]: DEBUG nova.virt.hardware [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 898.231072] env[63538]: DEBUG nova.virt.hardware [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 898.231257] env[63538]: DEBUG nova.virt.hardware [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 898.231428] env[63538]: DEBUG nova.virt.hardware [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 898.231609] env[63538]: DEBUG nova.virt.hardware [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.233449] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2871c74-bffa-43e0-8309-b443d8daf4c6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.245591] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b446c256-846f-4f76-aac3-71de7bf0245e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.334633] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101146, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.379534] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 47500aaa-92fc-454c-badd-d6f8a2203083] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 898.387842] env[63538]: ERROR nova.scheduler.client.report [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [req-5239de3a-a440-42ab-af08-71d25573973a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5239de3a-a440-42ab-af08-71d25573973a"}]} [ 898.407608] env[63538]: INFO nova.compute.manager [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Detaching volume bd560018-1c74-4223-aa48-6dd904889923 [ 898.412678] env[63538]: DEBUG nova.scheduler.client.report [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 898.431225] env[63538]: DEBUG nova.scheduler.client.report [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 898.431628] env[63538]: DEBUG nova.compute.provider_tree [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 898.443435] env[63538]: DEBUG nova.scheduler.client.report [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 898.465793] env[63538]: INFO nova.virt.block_device [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Attempting to driver detach volume bd560018-1c74-4223-aa48-6dd904889923 from mountpoint /dev/sdb [ 898.465793] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Volume detach. 
Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 898.465793] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992402', 'volume_id': 'bd560018-1c74-4223-aa48-6dd904889923', 'name': 'volume-bd560018-1c74-4223-aa48-6dd904889923', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd5d557c6-3d4e-4122-8756-218c9757fa01', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd560018-1c74-4223-aa48-6dd904889923', 'serial': 'bd560018-1c74-4223-aa48-6dd904889923'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 898.465793] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85691f5c-719b-4e41-9fbb-f095b82440d6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.472292] env[63538]: DEBUG nova.scheduler.client.report [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 898.508990] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c74022-f59f-4da5-80da-68934261deac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.520842] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39033a4-e5fc-4a6b-b15e-fc3344c1203b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.553956] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Releasing lock "refresh_cache-fa8ed101-914d-4751-ab9b-f68ad5da7a56" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.553956] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Instance network_info: |[{"id": "e650c787-b98c-4e3d-aa14-1a81d82c2c8b", "address": "fa:16:3e:88:c1:c3", "network": {"id": "106e253d-ba43-4eb6-a423-ed6a6d4156aa", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-330603874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"5d463d24e41b421eb7cb9d51ad207495", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape650c787-b9", "ovs_interfaceid": "e650c787-b98c-4e3d-aa14-1a81d82c2c8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 898.556659] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:c1:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bec903a9-d773-4d7c-a80c-c2533be346fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e650c787-b98c-4e3d-aa14-1a81d82c2c8b', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.565159] env[63538]: DEBUG oslo.service.loopingcall [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.565721] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124b528e-91f2-4790-990c-fd760cd36fef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.569790] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 898.570072] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 898.574258] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c927eb46-4339-45dd-a081-048bfcc2eb54 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.579577] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b53451d6-5447-4bd7-87f2-db7efe5cf30c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.600059] env[63538]: INFO nova.compute.manager [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Took 57.57 seconds to build instance. 
[ 898.618721] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] The volume has not been displaced from its original location: [datastore2] volume-bd560018-1c74-4223-aa48-6dd904889923/volume-bd560018-1c74-4223-aa48-6dd904889923.vmdk. No consolidation needed. {{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 898.624740] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Reconfiguring VM instance instance-00000034 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 898.630425] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a75d3b4d-4b7c-433c-b4d8-be988bd058aa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.644059] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 898.644059] env[63538]: value = "task-5101148" [ 898.644059] env[63538]: _type = "Task" [ 898.644059] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.644416] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.644416] env[63538]: value = "task-5101149" [ 898.644416] env[63538]: _type = "Task" [ 898.644416] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.645429] env[63538]: DEBUG nova.network.neutron [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Updated VIF entry in instance network info cache for port ed76faed-4b61-4cd6-833c-46bbb80f49e3. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 898.645797] env[63538]: DEBUG nova.network.neutron [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Updating instance_info_cache with network_info: [{"id": "ed76faed-4b61-4cd6-833c-46bbb80f49e3", "address": "fa:16:3e:71:77:a9", "network": {"id": "106e253d-ba43-4eb6-a423-ed6a6d4156aa", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-330603874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d463d24e41b421eb7cb9d51ad207495", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped76faed-4b", "ovs_interfaceid": "ed76faed-4b61-4cd6-833c-46bbb80f49e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.661175] env[63538]: DEBUG oslo_vmware.api [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 898.661175] env[63538]: value = "task-5101150" [ 898.661175] env[63538]: _type = "Task" [ 898.661175] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.669407] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101147, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094085} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.678402] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 898.678732] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101149, 'name': CreateVM_Task} progress is 15%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.679836] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101148, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.680796] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5e8ca0-67a3-43f4-bb6e-a792bc854396 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.687919] env[63538]: DEBUG oslo_vmware.api [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101150, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.712623] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 1db1d558-2473-49cb-b309-f7192bd6b9c1/1db1d558-2473-49cb-b309-f7192bd6b9c1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 898.716295] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed270308-9398-46c1-b3fb-647794ee0bc3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.739115] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 898.739115] env[63538]: value = "task-5101151" [ 898.739115] env[63538]: _type = "Task" [ 898.739115] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.751546] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101151, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.838511] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101146, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.885191] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: a6bb8713-6b00-4a43-96b7-a84ee39d790d] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 899.130101] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cfe2baac-f826-4577-b2ce-c9b983589c16 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.108s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.139572] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624c34b7-3be5-4f53-baee-d62bde6fb188 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.149281] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7027a9e-beeb-42a3-ba99-395f7e41a9c0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.152895] env[63538]: DEBUG oslo_concurrency.lockutils [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] Releasing lock "refresh_cache-1db1d558-2473-49cb-b309-f7192bd6b9c1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.153179] env[63538]: DEBUG nova.compute.manager [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Received event network-vif-plugged-4c96df2c-ae17-49fe-84c1-d86dd4b46eb6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 899.153427] env[63538]: DEBUG oslo_concurrency.lockutils [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] Acquiring lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.153571] env[63538]: DEBUG oslo_concurrency.lockutils [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] Lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.153732] env[63538]: DEBUG oslo_concurrency.lockutils [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] Lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.153897] env[63538]: DEBUG nova.compute.manager [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] No waiting events found dispatching 
network-vif-plugged-4c96df2c-ae17-49fe-84c1-d86dd4b46eb6 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 899.154083] env[63538]: WARNING nova.compute.manager [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Received unexpected event network-vif-plugged-4c96df2c-ae17-49fe-84c1-d86dd4b46eb6 for instance with vm_state active and task_state None. [ 899.154338] env[63538]: DEBUG nova.compute.manager [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Received event network-changed-4c96df2c-ae17-49fe-84c1-d86dd4b46eb6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 899.154403] env[63538]: DEBUG nova.compute.manager [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Refreshing instance network info cache due to event network-changed-4c96df2c-ae17-49fe-84c1-d86dd4b46eb6. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 899.154583] env[63538]: DEBUG oslo_concurrency.lockutils [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] Acquiring lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.154733] env[63538]: DEBUG oslo_concurrency.lockutils [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] Acquired lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.154925] env[63538]: DEBUG nova.network.neutron [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Refreshing network info cache for port 4c96df2c-ae17-49fe-84c1-d86dd4b46eb6 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 899.195354] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101148, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.199835] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2c466e-00a1-46e2-9652-ff9daaea0abe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.202872] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101149, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.206604] env[63538]: DEBUG oslo_vmware.api [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101150, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.212679] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4812ff1b-ea74-4665-9339-14cfd987236c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.230786] env[63538]: DEBUG nova.compute.provider_tree [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 899.251474] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101151, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.264679] env[63538]: DEBUG nova.network.neutron [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Successfully updated port: 8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 899.325043] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "8ed0bd15-71fc-435e-9e4a-90b023ad8a79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.325376] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "8ed0bd15-71fc-435e-9e4a-90b023ad8a79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.336400] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101146, 'name': CloneVM_Task, 'duration_secs': 1.774352} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.337234] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Created linked-clone VM from snapshot [ 899.337523] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83188b5d-921a-40eb-8952-ee8fc349d53f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.347011] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Uploading image b8bb0fea-6785-4c09-aa58-eb88b8eff6a7 {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 899.368494] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 899.368837] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-44e65b15-61f2-4fe8-a873-9889aebf1993 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.377971] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 899.377971] env[63538]: value = "task-5101152" [ 899.377971] env[63538]: _type = "Task" [ 899.377971] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.388578] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e32789d5-59ba-4657-9a9c-84fc9bd6cfdf] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 899.391075] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101152, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.634426] env[63538]: DEBUG nova.compute.manager [None req-e17ba593-10d3-4f5a-badf-20060b38f7a3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2ab79158-dd1a-482e-9f82-9c64104e9076] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 899.679687] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101148, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.680171] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101149, 'name': CreateVM_Task, 'duration_secs': 1.005833} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.681306] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 899.682510] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.682726] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.683295] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 899.683899] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc8b2ba1-dce5-4ace-b211-4cc382659dfb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.691567] env[63538]: DEBUG oslo_vmware.api [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101150, 'name': ReconfigVM_Task, 'duration_secs': 0.940581} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.692393] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Reconfigured VM instance instance-00000034 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 899.700426] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-882c7691-5235-48c4-aab9-c88765a9bd25 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.713306] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 899.713306] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5264a194-ee76-9b1a-9109-8eac3ebe35ac" [ 899.713306] env[63538]: _type = "Task" [ 899.713306] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.720209] env[63538]: DEBUG oslo_vmware.api [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 899.720209] env[63538]: value = "task-5101153" [ 899.720209] env[63538]: _type = "Task" [ 899.720209] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.729360] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5264a194-ee76-9b1a-9109-8eac3ebe35ac, 'name': SearchDatastore_Task, 'duration_secs': 0.016323} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.730565] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.730933] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.731829] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.731829] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.732024] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.737844] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a94b8430-bc67-4548-a496-43e029e9b10f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.744953] env[63538]: DEBUG oslo_vmware.api [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101153, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.761084] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101151, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.762660] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.762919] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 899.764191] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccc1213e-f9ee-4d87-a70c-512897e36144 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.766924] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.767071] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.767220] env[63538]: DEBUG nova.network.neutron [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 899.772424] env[63538]: ERROR nova.scheduler.client.report [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [req-137bcd15-3f47-40a8-ae8c-d91804a636e3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-137bcd15-3f47-40a8-ae8c-d91804a636e3"}]} [ 899.776297] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 899.776297] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529ff4fc-c363-550b-b142-caf93fdc3882" [ 899.776297] env[63538]: _type = "Task" [ 899.776297] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.789441] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529ff4fc-c363-550b-b142-caf93fdc3882, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.790633] env[63538]: DEBUG nova.scheduler.client.report [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 899.807512] env[63538]: DEBUG nova.scheduler.client.report [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 899.807512] env[63538]: DEBUG nova.compute.provider_tree [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 899.822596] env[63538]: DEBUG nova.scheduler.client.report [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:827}} [ 899.848760] env[63538]: DEBUG nova.scheduler.client.report [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 899.889657] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101152, 'name': Destroy_Task} progress is 33%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.893676] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: c065263a-fd40-4b44-a68e-0e03248d0bc0] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 900.033484] env[63538]: DEBUG nova.compute.manager [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Received event network-vif-plugged-e650c787-b98c-4e3d-aa14-1a81d82c2c8b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 900.033755] env[63538]: DEBUG oslo_concurrency.lockutils [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] Acquiring lock "fa8ed101-914d-4751-ab9b-f68ad5da7a56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.033998] env[63538]: DEBUG oslo_concurrency.lockutils [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] Lock "fa8ed101-914d-4751-ab9b-f68ad5da7a56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.034202] env[63538]: DEBUG oslo_concurrency.lockutils [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] Lock "fa8ed101-914d-4751-ab9b-f68ad5da7a56-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.034493] env[63538]: DEBUG nova.compute.manager [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] No waiting events found dispatching network-vif-plugged-e650c787-b98c-4e3d-aa14-1a81d82c2c8b {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 900.034737] env[63538]: WARNING nova.compute.manager [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Received unexpected event network-vif-plugged-e650c787-b98c-4e3d-aa14-1a81d82c2c8b for instance with vm_state building and task_state spawning. 
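
[editor's note] The ERROR/refresh sequence above (409 "placement.concurrent_update" followed by re-reading inventories, aggregates and traits, then a successful update that bumps the provider generation from 100 to 101) is the standard generation-conflict recovery against the Placement API. The following is a minimal illustrative sketch of that pattern against Placement's HTTP API, not Nova's actual report-client code; the endpoint URL, token handling and retry count are assumptions.

    # Sketch: retry an inventory PUT when Placement answers 409
    # placement.concurrent_update, by re-reading the provider generation first.
    import requests

    PLACEMENT = "http://placement.example:8778"            # assumed endpoint
    HEADERS = {"X-Auth-Token": "<token>",                   # assumed auth
               "OpenStack-API-Version": "placement 1.26"}   # assumed microversion

    def put_inventory_with_retry(provider_uuid, inventories, attempts=3):
        for _ in range(attempts):
            # Refresh the provider generation; a stale value is what causes the 409.
            rp = requests.get(f"{PLACEMENT}/resource_providers/{provider_uuid}",
                              headers=HEADERS)
            rp.raise_for_status()
            generation = rp.json()["generation"]

            resp = requests.put(
                f"{PLACEMENT}/resource_providers/{provider_uuid}/inventories",
                headers=HEADERS,
                json={"resource_provider_generation": generation,
                      "inventories": inventories})
            if resp.status_code == 200:
                return resp.json()  # body carries the new generation
            errors = resp.json().get("errors", [])
            if resp.status_code == 409 and any(
                    e.get("code") == "placement.concurrent_update" for e in errors):
                continue            # another writer bumped the generation; re-read and retry
            resp.raise_for_status()
        raise RuntimeError(f"inventory update still conflicting after {attempts} attempts")

The "inventories" argument has the same shape as the dicts logged above, e.g. {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, ...}.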
[ 900.034972] env[63538]: DEBUG nova.compute.manager [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Received event network-changed-e650c787-b98c-4e3d-aa14-1a81d82c2c8b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 900.035176] env[63538]: DEBUG nova.compute.manager [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Refreshing instance network info cache due to event network-changed-e650c787-b98c-4e3d-aa14-1a81d82c2c8b. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 900.035454] env[63538]: DEBUG oslo_concurrency.lockutils [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] Acquiring lock "refresh_cache-fa8ed101-914d-4751-ab9b-f68ad5da7a56" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.035586] env[63538]: DEBUG oslo_concurrency.lockutils [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] Acquired lock "refresh_cache-fa8ed101-914d-4751-ab9b-f68ad5da7a56" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.035883] env[63538]: DEBUG nova.network.neutron [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Refreshing network info cache for port e650c787-b98c-4e3d-aa14-1a81d82c2c8b {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 900.098374] env[63538]: DEBUG nova.network.neutron [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Updated VIF entry in instance network info cache for port 4c96df2c-ae17-49fe-84c1-d86dd4b46eb6. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 900.099055] env[63538]: DEBUG nova.network.neutron [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Updating instance_info_cache with network_info: [{"id": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "address": "fa:16:3e:60:55:b3", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap696fc25d-fa", "ovs_interfaceid": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4c96df2c-ae17-49fe-84c1-d86dd4b46eb6", "address": "fa:16:3e:1e:f2:d9", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c96df2c-ae", "ovs_interfaceid": "4c96df2c-ae17-49fe-84c1-d86dd4b46eb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.139129] env[63538]: DEBUG nova.compute.manager [None req-e17ba593-10d3-4f5a-badf-20060b38f7a3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2ab79158-dd1a-482e-9f82-9c64104e9076] Instance disappeared before build. {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2440}} [ 900.166830] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101148, 'name': CreateSnapshot_Task, 'duration_secs': 1.511914} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.167132] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 900.167915] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b832548b-ff16-4c59-8bf9-e4077c12030d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.232665] env[63538]: DEBUG oslo_vmware.api [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101153, 'name': ReconfigVM_Task, 'duration_secs': 0.213991} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.235455] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992402', 'volume_id': 'bd560018-1c74-4223-aa48-6dd904889923', 'name': 'volume-bd560018-1c74-4223-aa48-6dd904889923', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd5d557c6-3d4e-4122-8756-218c9757fa01', 'attached_at': '', 'detached_at': '', 'volume_id': 'bd560018-1c74-4223-aa48-6dd904889923', 'serial': 'bd560018-1c74-4223-aa48-6dd904889923'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 900.261446] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101151, 'name': ReconfigVM_Task, 'duration_secs': 1.357725} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.264525] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 1db1d558-2473-49cb-b309-f7192bd6b9c1/1db1d558-2473-49cb-b309-f7192bd6b9c1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 900.265458] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bc374ca3-9ef6-4e79-9d52-1d0975b4737e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.273939] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 900.273939] env[63538]: value = "task-5101154" [ 900.273939] env[63538]: _type = "Task" [ 900.273939] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.293958] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101154, 'name': Rename_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.298283] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]529ff4fc-c363-550b-b142-caf93fdc3882, 'name': SearchDatastore_Task, 'duration_secs': 0.014774} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.299580] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a0d7fcb-48a5-4335-a2f9-0461440422ad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.311493] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 900.311493] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c51e36-329d-4ed7-688b-81c83bde247d" [ 900.311493] env[63538]: _type = "Task" [ 900.311493] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.319344] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c51e36-329d-4ed7-688b-81c83bde247d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.332232] env[63538]: DEBUG nova.network.neutron [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 900.390474] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101152, 'name': Destroy_Task, 'duration_secs': 0.562779} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.391919] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Destroyed the VM [ 900.392667] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 900.394228] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8877d6f9-1c6a-4016-982d-555698d76221 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.396968] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3d7706e7-a7e7-4f6f-8ffd-5e95dc0a801a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.399358] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 7de2ac8b-bc9e-4493-9a11-a28a1b3a9d0b] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 900.412238] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b7aa10-3c21-443d-8793-dcb3b351b4d6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.417066] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 900.417066] env[63538]: value = "task-5101155" [ 900.417066] env[63538]: _type = "Task" [ 900.417066] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.450736] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522415d9-fcf3-4728-bce8-82f051c9303f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.457473] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101155, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.466538] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b3d389-8169-4437-95c4-846e632dfb7e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.485871] env[63538]: DEBUG nova.compute.provider_tree [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 900.602224] env[63538]: DEBUG oslo_concurrency.lockutils [req-a72d7a4e-d670-4fed-a9bd-8ff9d35e58c0 req-f9ad1f46-b1c2-4f31-9382-38f6c49c1e03 service nova] Releasing lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.638527] env[63538]: DEBUG nova.network.neutron [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Updating instance_info_cache with network_info: [{"id": "8a332a90-393f-41ae-a924-4959c06e6207", "address": "fa:16:3e:37:0e:9d", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a332a90-39", "ovs_interfaceid": "8a332a90-393f-41ae-a924-4959c06e6207", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.659075] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e17ba593-10d3-4f5a-badf-20060b38f7a3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "2ab79158-dd1a-482e-9f82-9c64104e9076" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.634s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.690063] env[63538]: DEBUG 
nova.virt.vmwareapi.vmops [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 900.690422] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b14333d4-f198-453f-a153-ef897d10e1ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.702547] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 900.702547] env[63538]: value = "task-5101156" [ 900.702547] env[63538]: _type = "Task" [ 900.702547] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.712218] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101156, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.788381] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101154, 'name': Rename_Task, 'duration_secs': 0.187833} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.788381] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 900.788381] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57cb85aa-5eb1-40e3-b8ae-245dd85531e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.793099] env[63538]: DEBUG nova.objects.instance [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'flavor' on Instance uuid d5d557c6-3d4e-4122-8756-218c9757fa01 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 900.796948] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 900.796948] env[63538]: value = "task-5101157" [ 900.796948] env[63538]: _type = "Task" [ 900.796948] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.811529] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101157, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.823631] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c51e36-329d-4ed7-688b-81c83bde247d, 'name': SearchDatastore_Task, 'duration_secs': 0.032846} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.824101] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.824502] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] fa8ed101-914d-4751-ab9b-f68ad5da7a56/fa8ed101-914d-4751-ab9b-f68ad5da7a56.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 900.824929] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8de48948-86b6-43bb-8c1e-c5fe97e63f07 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.835815] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 900.835815] env[63538]: value = "task-5101158" [ 900.835815] env[63538]: _type = "Task" [ 900.835815] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.847147] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101158, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.912729] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: dbf48807-08a7-46d1-8454-42437a9f87c0] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 900.934093] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101155, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.955664] env[63538]: DEBUG nova.network.neutron [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Updated VIF entry in instance network info cache for port e650c787-b98c-4e3d-aa14-1a81d82c2c8b. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 900.955743] env[63538]: DEBUG nova.network.neutron [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Updating instance_info_cache with network_info: [{"id": "e650c787-b98c-4e3d-aa14-1a81d82c2c8b", "address": "fa:16:3e:88:c1:c3", "network": {"id": "106e253d-ba43-4eb6-a423-ed6a6d4156aa", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-330603874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d463d24e41b421eb7cb9d51ad207495", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape650c787-b9", "ovs_interfaceid": "e650c787-b98c-4e3d-aa14-1a81d82c2c8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.966209] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquiring lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.966572] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.966931] env[63538]: INFO nova.compute.manager [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Rebooting instance [ 901.024377] env[63538]: DEBUG nova.scheduler.client.report [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 100 in Placement from set_inventory_for_provider using data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 901.024814] env[63538]: DEBUG nova.compute.provider_tree [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 100 to 101 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 901.025070] env[63538]: DEBUG nova.compute.provider_tree [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 901.070144] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "interface-d967631f-5c8a-42d8-ac05-4cec3bdb55cf-4c96df2c-ae17-49fe-84c1-d86dd4b46eb6" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.070452] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-d967631f-5c8a-42d8-ac05-4cec3bdb55cf-4c96df2c-ae17-49fe-84c1-d86dd4b46eb6" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.141301] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.141659] env[63538]: DEBUG nova.compute.manager [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Instance network_info: |[{"id": "8a332a90-393f-41ae-a924-4959c06e6207", "address": "fa:16:3e:37:0e:9d", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a332a90-39", "ovs_interfaceid": "8a332a90-393f-41ae-a924-4959c06e6207", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 901.142140] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:0e:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a332a90-393f-41ae-a924-4959c06e6207', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.156721] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating folder: Project (1fe11c1386b14d139f4416cbf20fb201). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 901.158710] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6344cfcf-b9c0-4c14-8941-b32bbe643d82 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.161946] env[63538]: DEBUG nova.compute.manager [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 901.173575] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Created folder: Project (1fe11c1386b14d139f4416cbf20fb201) in parent group-v992234. [ 901.173800] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating folder: Instances. Parent ref: group-v992430. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 901.174076] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-607234f4-8ca6-4158-9b38-3afab2902508 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.186626] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Created folder: Instances in parent group-v992430. [ 901.186951] env[63538]: DEBUG oslo.service.loopingcall [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.187238] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 901.187473] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ffc09f2-7334-432c-aaac-21517dd12f5f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.209998] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.209998] env[63538]: value = "task-5101161" [ 901.209998] env[63538]: _type = "Task" [ 901.209998] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.218542] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101156, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.225177] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101161, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.322594] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101157, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.351469] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101158, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.416617] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 543875b5-195a-476d-a0b4-3211ceefa27f] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 901.431813] env[63538]: DEBUG oslo_vmware.api [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101155, 'name': RemoveSnapshot_Task, 'duration_secs': 0.559607} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.432199] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 901.461998] env[63538]: DEBUG oslo_concurrency.lockutils [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] Releasing lock "refresh_cache-fa8ed101-914d-4751-ab9b-f68ad5da7a56" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.461998] env[63538]: DEBUG nova.compute.manager [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Received event network-vif-plugged-8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 901.461998] env[63538]: DEBUG oslo_concurrency.lockutils [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] Acquiring lock "0df15328-aebd-44c5-9c78-ee05f188ad95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.461998] env[63538]: DEBUG oslo_concurrency.lockutils [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.461998] env[63538]: DEBUG oslo_concurrency.lockutils [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.461998] env[63538]: DEBUG nova.compute.manager [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] No waiting events found dispatching network-vif-plugged-8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 901.461998] env[63538]: WARNING nova.compute.manager 
[req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Received unexpected event network-vif-plugged-8a332a90-393f-41ae-a924-4959c06e6207 for instance with vm_state building and task_state spawning. [ 901.461998] env[63538]: DEBUG nova.compute.manager [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Received event network-changed-8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 901.462446] env[63538]: DEBUG nova.compute.manager [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Refreshing instance network info cache due to event network-changed-8a332a90-393f-41ae-a924-4959c06e6207. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 901.462572] env[63538]: DEBUG oslo_concurrency.lockutils [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] Acquiring lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.462787] env[63538]: DEBUG oslo_concurrency.lockutils [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] Acquired lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.463044] env[63538]: DEBUG nova.network.neutron [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Refreshing network info cache for port 8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 901.500421] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquiring lock "refresh_cache-b0b4ae9c-95d3-47a1-86a7-120c88b60704" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.500421] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquired lock "refresh_cache-b0b4ae9c-95d3-47a1-86a7-120c88b60704" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.500421] env[63538]: DEBUG nova.network.neutron [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 901.531695] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.368s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.532799] env[63538]: DEBUG nova.compute.manager [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 901.539934] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 34.133s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.576029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.576029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.576029] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2db19cc-29bd-4c7d-821e-a55eba814ca1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.596466] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86466d96-a2b8-42c6-a44e-56e919cf4553 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.629317] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Reconfiguring VM to detach interface {{(pid=63538) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 901.629995] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c2a4012-85f0-475a-bb5a-259d16a080a1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.650786] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 901.650786] env[63538]: value = "task-5101162" [ 901.650786] env[63538]: _type = "Task" [ 901.650786] env[63538]: } to complete. 
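The lockutils entries around here ("compute_resources" acquired after waiting 34.133s, released after being held 5.368s) come from a wrapper that times both the wait for the lock and how long it was held. A small stand-in sketch of that timing pattern, not the actual oslo_concurrency implementation:

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}

    @contextmanager
    def timed_lock(name, owner):
        lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
        held_start = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - held_start
            print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, held))

    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        pass  # claim resources while the lock is held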
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.661100] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.685599] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.720281] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101156, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.731962] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101161, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.812871] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e371b15b-2ee4-46e6-8243-e324b83b036e tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.911s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.814209] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101157, 'name': PowerOnVM_Task, 'duration_secs': 0.607751} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.814386] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 901.814601] env[63538]: INFO nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Took 10.32 seconds to spawn the instance on the hypervisor. 
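A few records back, event network-vif-plugged-8a332a90-393f-41ae-a924-4959c06e6207 arrived before anyone was waiting for it, so the manager logged it as unexpected for an instance still in vm_state building / task_state spawning. A simplified, hypothetical sketch of that per-instance event table and its dispatch decision (not nova.compute.manager code):

    from collections import defaultdict

    _pending_events = defaultdict(dict)   # instance_uuid -> {event_name: callback}

    def pop_instance_event(instance_uuid, event_name):
        return _pending_events[instance_uuid].pop(event_name, None)

    def external_instance_event(instance_uuid, event_name, vm_state, task_state):
        waiter = pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            print("WARNING Received unexpected event %s for instance with "
                  "vm_state %s and task_state %s." % (event_name, vm_state, task_state))
            return
        waiter(event_name)   # wake whoever was waiting for the VIF to be plugged

    external_instance_event("0df15328-aebd-44c5-9c78-ee05f188ad95",
                            "network-vif-plugged-8a332a90-393f-41ae-a924-4959c06e6207",
                            "building", "spawning")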
[ 901.814832] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 901.815904] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f125bc89-02b5-44cd-9f21-a5d369200046 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.847748] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101158, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.664444} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.848044] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] fa8ed101-914d-4751-ab9b-f68ad5da7a56/fa8ed101-914d-4751-ab9b-f68ad5da7a56.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 901.848277] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 901.848895] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e28fe687-ca60-4ef7-87a3-ae932c1e2376 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.857459] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 901.857459] env[63538]: value = "task-5101163" [ 901.857459] env[63538]: _type = "Task" [ 901.857459] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.869335] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101163, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.925210] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 102c0463-fb64-4dda-914c-b98c8e9991ad] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 901.937957] env[63538]: WARNING nova.compute.manager [None req-5cb7eaed-f4b6-4193-896c-1a015a44a5e0 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Image not found during snapshot: nova.exception.ImageNotFound: Image b8bb0fea-6785-4c09-aa58-eb88b8eff6a7 could not be found. [ 902.040687] env[63538]: DEBUG nova.compute.utils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 902.042189] env[63538]: DEBUG nova.compute.manager [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 902.042363] env[63538]: DEBUG nova.network.neutron [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 902.047126] env[63538]: DEBUG nova.objects.instance [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lazy-loading 'migration_context' on Instance uuid a2e036ae-318b-44ea-9db0-10fa3838728b {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 902.113978] env[63538]: DEBUG nova.policy [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e7850362eef47f1b623b6e004d60ade', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '55edcd65da7b4a569a4c27aab4819cde', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 902.161952] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 14%. 
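The nova.policy entry above records a failed check of network:attach_external_network against credentials with is_admin False and roles ['member', 'reader']. The real check goes through oslo.policy rules; the toy helper below is a hypothetical illustration of that kind of role-based gate:

    def authorize(rule, required_roles, creds):
        # Admin contexts pass; otherwise the caller needs one of the required roles.
        if creds.get("is_admin"):
            return True
        return any(role in creds.get("roles", []) for role in required_roles)

    creds = {"is_admin": False, "roles": ["member", "reader"],
             "project_id": "55edcd65da7b4a569a4c27aab4819cde"}
    if not authorize("network:attach_external_network", ["admin"], creds):
        print("Policy check for network:attach_external_network failed with credentials", creds)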
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.222837] env[63538]: DEBUG oslo_concurrency.lockutils [None req-466435b9-ac8f-4965-99bf-3e6f69a58c16 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "d5d557c6-3d4e-4122-8756-218c9757fa01" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.223235] env[63538]: DEBUG oslo_concurrency.lockutils [None req-466435b9-ac8f-4965-99bf-3e6f69a58c16 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.223431] env[63538]: DEBUG nova.compute.manager [None req-466435b9-ac8f-4965-99bf-3e6f69a58c16 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 902.223772] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101156, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.229766] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d874e0-aeed-40e9-a2b6-9f2ea6fca984 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.235330] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101161, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.240371] env[63538]: DEBUG nova.compute.manager [None req-466435b9-ac8f-4965-99bf-3e6f69a58c16 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63538) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 902.241046] env[63538]: DEBUG nova.objects.instance [None req-466435b9-ac8f-4965-99bf-3e6f69a58c16 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'flavor' on Instance uuid d5d557c6-3d4e-4122-8756-218c9757fa01 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 902.339093] env[63538]: INFO nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Took 56.32 seconds to build instance. 
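The do_stop_instance entries above first compare the DB power state with what the hypervisor reports (both 1, i.e. running) before issuing the power-off. A compressed, hypothetical sketch of that decision:

    RUNNING, SHUTDOWN = 1, 4   # numeric power states, matching "power_state: 1" above

    def do_stop_instance(db_power_state, vm_power_state, power_off):
        print("Stopping instance; current DB power_state: %d, current VM power_state: %d"
              % (db_power_state, vm_power_state))
        if vm_power_state == RUNNING:
            power_off()            # issues PowerOffVM_Task against the backend
            return SHUTDOWN
        return vm_power_state      # already stopped, nothing to do

    do_stop_instance(RUNNING, RUNNING, power_off=lambda: print("PowerOffVM_Task"))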
[ 902.372712] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101163, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075192} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.372855] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.374226] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a31694-8e3a-44d1-80fe-e2e4318b28d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.403554] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] fa8ed101-914d-4751-ab9b-f68ad5da7a56/fa8ed101-914d-4751-ab9b-f68ad5da7a56.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.407515] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-323c47e7-d26f-4b76-a5b6-72f42e038d01 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.430992] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 1e33b68e-8509-4ec4-8ec4-dc758aae9a5a] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 902.439487] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 902.439487] env[63538]: value = "task-5101164" [ 902.439487] env[63538]: _type = "Task" [ 902.439487] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.449297] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101164, 'name': ReconfigVM_Task} progress is 5%. 
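The ExtendVirtualDisk_Task above grows the copied root disk to the flavor size; for a 1 GiB root_gb the driver asks for 1048576 KB, the figure seen earlier in the log. A hedged sketch of that size calculation, with the extend callable standing in for the real vSphere task and the current size being an arbitrary example value:

    def extend_root_disk_if_needed(root_gb, current_size_kb, extend_virtual_disk):
        requested_kb = root_gb * 1024 * 1024    # flavor root_gb (GiB) expressed in KB
        if requested_kb > current_size_kb:
            extend_virtual_disk(requested_kb)   # ExtendVirtualDisk_Task
            return requested_kb
        return current_size_kb

    extend_root_disk_if_needed(
        root_gb=1,
        current_size_kb=20480,                  # example: image smaller than the flavor root disk
        extend_virtual_disk=lambda kb: print("Extending root virtual disk to %d" % kb))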
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.525019] env[63538]: DEBUG nova.network.neutron [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Successfully created port: cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 902.549267] env[63538]: DEBUG nova.compute.manager [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 902.661409] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.725429] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101156, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.735414] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101161, 'name': CreateVM_Task, 'duration_secs': 1.434341} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.735414] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 902.735414] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.735414] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.735414] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 902.735414] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0603357a-2c86-46bd-b9c9-76a49e80a965 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.740878] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 902.740878] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5285b2b2-e47f-d8f9-fec8-1d56aafe3e6f" [ 902.740878] env[63538]: _type = "Task" [ 902.740878] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.750668] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-466435b9-ac8f-4965-99bf-3e6f69a58c16 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 902.751098] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78f39f38-f952-443d-9029-95d18010ca11 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.757758] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5285b2b2-e47f-d8f9-fec8-1d56aafe3e6f, 'name': SearchDatastore_Task, 'duration_secs': 0.012412} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.758843] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.759430] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 902.759727] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.759946] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.760198] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 
tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.760542] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94352906-af16-4634-84ab-cbb6e865659f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.769535] env[63538]: DEBUG oslo_vmware.api [None req-466435b9-ac8f-4965-99bf-3e6f69a58c16 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 902.769535] env[63538]: value = "task-5101165" [ 902.769535] env[63538]: _type = "Task" [ 902.769535] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.779687] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 902.780240] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 902.787345] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90b471f7-d1b3-4f9b-b3d4-60ad14b1eb3a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.790821] env[63538]: DEBUG oslo_vmware.api [None req-466435b9-ac8f-4965-99bf-3e6f69a58c16 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101165, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.798331] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 902.798331] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522ce824-d48a-0733-7428-7dc4f040b938" [ 902.798331] env[63538]: _type = "Task" [ 902.798331] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.802839] env[63538]: DEBUG nova.network.neutron [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Updated VIF entry in instance network info cache for port 8a332a90-393f-41ae-a924-4959c06e6207. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 902.803312] env[63538]: DEBUG nova.network.neutron [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Updating instance_info_cache with network_info: [{"id": "8a332a90-393f-41ae-a924-4959c06e6207", "address": "fa:16:3e:37:0e:9d", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a332a90-39", "ovs_interfaceid": "8a332a90-393f-41ae-a924-4959c06e6207", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.807061] env[63538]: DEBUG nova.network.neutron [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Updating instance_info_cache with network_info: [{"id": "2dc178e5-2d66-4747-ae40-c03f69eba8e8", "address": "fa:16:3e:58:4d:69", "network": {"id": "9c8a7880-0663-49c8-8cc7-df517ad2ab08", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1858496628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fae9c04cde44afbb9a8295910faf2dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dc178e5-2d", "ovs_interfaceid": "2dc178e5-2d66-4747-ae40-c03f69eba8e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.817273] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522ce824-d48a-0733-7428-7dc4f040b938, 'name': SearchDatastore_Task, 'duration_secs': 0.012217} 
completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.817879] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd8ad8ac-6bca-41b0-8153-0db2fde0604e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.826432] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 902.826432] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5267c4aa-49c4-eaec-3d74-9c1b5963a8ba" [ 902.826432] env[63538]: _type = "Task" [ 902.826432] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.839982] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5267c4aa-49c4-eaec-3d74-9c1b5963a8ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.845692] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "1db1d558-2473-49cb-b309-f7192bd6b9c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.834s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.936851] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: bf54098e-91a8-403f-a6fe-b58a62daaadb] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 902.951620] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101164, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.166260] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 14%. 
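The instance_info_cache update a little above carries the full VIF structure for port 8a332a90-393f-41ae-a924-4959c06e6207: MAC, subnets, fixed IP, devname and OVS interface id. A small sketch of walking such an entry to pull out the fixed addresses (the parsing helper is hypothetical; the data is trimmed from the log):

    network_info = [{
        "id": "8a332a90-393f-41ae-a924-4959c06e6207",
        "address": "fa:16:3e:37:0e:9d",
        "network": {
            "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.12", "type": "fixed"}],
            }],
        },
        "type": "ovs",
        "devname": "tap8a332a90-39",
        "ovs_interfaceid": "8a332a90-393f-41ae-a924-4959c06e6207",
    }]

    def fixed_ips(vif):
        return [ip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                if ip["type"] == "fixed"]

    for vif in network_info:
        print(vif["devname"], vif["address"], fixed_ips(vif))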
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.172712] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23b0252-061a-47c7-80f5-43cfdbe974b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.181029] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3dd1cc-475f-49c0-9b69-5ddc51f60e08 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.219701] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90474ec-d76a-4189-8e30-590256a1353a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.230228] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101156, 'name': CloneVM_Task} progress is 95%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.231526] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a2ac0e-1394-4da1-9017-8af87823a6b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.246586] env[63538]: DEBUG nova.compute.provider_tree [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.283283] env[63538]: DEBUG oslo_vmware.api [None req-466435b9-ac8f-4965-99bf-3e6f69a58c16 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101165, 'name': PowerOffVM_Task, 'duration_secs': 0.379677} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.283283] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-466435b9-ac8f-4965-99bf-3e6f69a58c16 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 903.283283] env[63538]: DEBUG nova.compute.manager [None req-466435b9-ac8f-4965-99bf-3e6f69a58c16 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 903.283605] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a360c9eb-bbbf-42fe-bf72-9b103d68b2d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.310030] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Releasing lock "refresh_cache-b0b4ae9c-95d3-47a1-86a7-120c88b60704" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.314087] env[63538]: DEBUG oslo_concurrency.lockutils [req-f59e93bd-cbea-433d-8d6d-cdf2b6872b04 req-96bd0db6-7082-494e-aac7-515f9e50570b service nova] Releasing lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.314087] env[63538]: DEBUG nova.compute.manager [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 903.315032] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36233d45-827c-494d-ba43-b5fb7cd635cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.338907] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5267c4aa-49c4-eaec-3d74-9c1b5963a8ba, 'name': SearchDatastore_Task, 'duration_secs': 0.011538} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.339521] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.339521] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 0df15328-aebd-44c5-9c78-ee05f188ad95/0df15328-aebd-44c5-9c78-ee05f188ad95.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 903.339851] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09715ec8-64b4-4494-b241-620194a20cb7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.349524] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 903.349524] env[63538]: value = "task-5101166" [ 903.349524] env[63538]: _type = "Task" [ 903.349524] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.353765] env[63538]: DEBUG nova.compute.manager [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 903.365133] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101166, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.447982] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 36d40b69-fae7-4867-afa1-4befdc96bde0] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 903.458321] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101164, 'name': ReconfigVM_Task, 'duration_secs': 0.700258} completed successfully. 
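The SearchDatastore/MakeDirectory/CopyVirtualDisk sequence around here is the image-cache flow: reuse the cached VMDK in devstack-image-cache_base when it exists, otherwise create the cache directory and download, then copy the cached disk into the instance's own folder. An illustrative sketch under those assumptions, with the datastore calls replaced by stand-in callables:

    def fetch_or_reuse_cached_image(image_id, instance_uuid, datastore_search,
                                    make_directory, copy_virtual_disk):
        cache_dir = "[datastore2] devstack-image-cache_base"
        cached_vmdk = "%s/%s/%s.vmdk" % (cache_dir, image_id, image_id)
        target_vmdk = "[datastore2] %s/%s.vmdk" % (instance_uuid, instance_uuid)

        if not datastore_search(cached_vmdk):
            make_directory(cache_dir)                 # FileManager.MakeDirectory
            # ...download the image into the cache here...
        copy_virtual_disk(cached_vmdk, target_vmdk)   # CopyVirtualDisk_Task
        return target_vmdk

    print(fetch_or_reuse_cached_image(
        "faabbca4-e27b-433a-b93d-f059fd73bc92",
        "0df15328-aebd-44c5-9c78-ee05f188ad95",
        datastore_search=lambda path: True,
        make_directory=lambda path: None,
        copy_virtual_disk=lambda src, dst: None))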
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.458842] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Reconfigured VM instance instance-00000046 to attach disk [datastore2] fa8ed101-914d-4751-ab9b-f68ad5da7a56/fa8ed101-914d-4751-ab9b-f68ad5da7a56.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.459782] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35259730-6137-411a-afd2-7767b370c347 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.468805] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 903.468805] env[63538]: value = "task-5101167" [ 903.468805] env[63538]: _type = "Task" [ 903.468805] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.480089] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101167, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.564406] env[63538]: DEBUG nova.compute.manager [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 903.589456] env[63538]: DEBUG nova.virt.hardware [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 903.589727] env[63538]: DEBUG nova.virt.hardware [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 903.589904] env[63538]: DEBUG nova.virt.hardware [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 903.590126] env[63538]: DEBUG nova.virt.hardware [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 903.590281] env[63538]: DEBUG nova.virt.hardware [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 903.590432] env[63538]: DEBUG nova.virt.hardware [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 903.590642] env[63538]: DEBUG nova.virt.hardware [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 903.590801] env[63538]: DEBUG nova.virt.hardware [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 903.590975] env[63538]: DEBUG nova.virt.hardware [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 903.591157] env[63538]: DEBUG nova.virt.hardware [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 903.591337] env[63538]: DEBUG nova.virt.hardware [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 903.592589] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a55398a-e310-4f8f-a4dc-d4b4ad99fed9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.602766] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9394cc7-b018-4314-ad84-cfb802bbed2a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.662987] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.730487] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101156, 'name': CloneVM_Task, 'duration_secs': 2.760474} completed successfully. 
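The hardware.py entries above enumerate candidate CPU topologies for the 1-vCPU m1.nano flavor under the default 65536 limits and end up with the single option (1, 1, 1). A simplified sketch of that enumeration (not the actual nova.virt.hardware code):

    import itertools

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # All (sockets, cores, threads) triples whose product equals the vCPU count,
        # capped by the per-dimension limits.
        topologies = []
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                topologies.append((sockets, cores, threads))
        return topologies

    # m1.nano has 1 vCPU, so the only candidate is (1, 1, 1), matching the log.
    print(possible_cpu_topologies(1))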
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.730908] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Created linked-clone VM from snapshot [ 903.731921] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71cd22ad-4894-4016-87e1-5530c6fad02e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.742337] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Uploading image 375e564f-7253-420e-99be-0bda90d5793d {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 903.750467] env[63538]: DEBUG nova.scheduler.client.report [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 903.771331] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 903.771331] env[63538]: value = "vm-992429" [ 903.771331] env[63538]: _type = "VirtualMachine" [ 903.771331] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 903.771726] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bb287df8-4b56-4aa5-98b5-fedfb9d446fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.782844] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lease: (returnval){ [ 903.782844] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5258c5bf-add1-3ccf-2625-7fe9568163fe" [ 903.782844] env[63538]: _type = "HttpNfcLease" [ 903.782844] env[63538]: } obtained for exporting VM: (result){ [ 903.782844] env[63538]: value = "vm-992429" [ 903.782844] env[63538]: _type = "VirtualMachine" [ 903.782844] env[63538]: }. 
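The "Inventory has not changed" report above boils down to comparing the freshly computed inventory dict for provider f65218a4-1d3d-476a-9093-01cae92c8635 with the cached copy; only a difference would trigger an update to placement. A trivial sketch of that comparison using the numbers from the log:

    def inventory_changed(cached, new):
        return cached != new

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    if not inventory_changed(inventory, dict(inventory)):
        print("Inventory has not changed for provider "
              "f65218a4-1d3d-476a-9093-01cae92c8635")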
{{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 903.782844] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the lease: (returnval){ [ 903.782844] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5258c5bf-add1-3ccf-2625-7fe9568163fe" [ 903.782844] env[63538]: _type = "HttpNfcLease" [ 903.782844] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 903.793367] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 903.793367] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5258c5bf-add1-3ccf-2625-7fe9568163fe" [ 903.793367] env[63538]: _type = "HttpNfcLease" [ 903.793367] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 903.798600] env[63538]: DEBUG oslo_concurrency.lockutils [None req-466435b9-ac8f-4965-99bf-3e6f69a58c16 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.575s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.865335] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101166, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.883136] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.938513] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "edc670dd-732a-4c54-924c-c99ee539d4d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.938953] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "edc670dd-732a-4c54-924c-c99ee539d4d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.938953] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "edc670dd-732a-4c54-924c-c99ee539d4d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.940112] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "edc670dd-732a-4c54-924c-c99ee539d4d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.940112] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "edc670dd-732a-4c54-924c-c99ee539d4d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.944411] env[63538]: INFO nova.compute.manager [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Terminating instance [ 903.947255] env[63538]: DEBUG nova.compute.manager [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Start destroying the instance on the hypervisor. 
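The lock trace above ("Acquiring lock ... by ...", "acquired ... :: waited", "released ... :: held") is emitted by oslo.concurrency's lockutils wrapper around the decorated callable. A minimal sketch of that pattern, with an illustrative lock name rather than the instance UUID from this log:

from oslo_concurrency import lockutils

@lockutils.synchronized('instance-uuid-events')
def clear_events():
    # Runs only while the named internal semaphore is held; lockutils logs
    # how long the caller waited for the lock and how long it was held.
    return {}

# The same lock can also be taken explicitly as a context manager:
with lockutils.lock('instance-uuid'):
    clear_events()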
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 903.947392] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 903.948408] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25656fc-6548-487b-8233-a6c46f5aaa44 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.951584] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 10d69b79-5ce4-4fea-abd5-0e6e7c83a3bf] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 903.963193] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 903.963193] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-316b4078-3b25-4ff5-be9c-df0fb29d9728 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.971671] env[63538]: DEBUG oslo_vmware.api [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 903.971671] env[63538]: value = "task-5101169" [ 903.971671] env[63538]: _type = "Task" [ 903.971671] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.986934] env[63538]: DEBUG oslo_vmware.api [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101169, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.990242] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101167, 'name': Rename_Task, 'duration_secs': 0.166674} completed successfully. 
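The "Powering off the VM" / "Invoking VirtualMachine.PowerOffVM_Task" / "Waiting for the task ... to complete" sequence above is the usual oslo.vmware call-then-poll pattern. A hedged sketch of that pattern outside Nova, with placeholder vCenter credentials and the VM reference looked up generically rather than resolved from an instance UUID:

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=3, task_poll_interval=0.5)

# Grab some VirtualMachine managed-object reference (Nova resolves the ref
# from the instance instead; this lookup is purely illustrative).
retrieve_result = session.invoke_api(vim_util, 'get_objects',
                                     session.vim, 'VirtualMachine', 10)
vm_ref = retrieve_result.objects[0].obj

task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)  # polls task progress and raises if the task errors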
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.990652] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 903.991412] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-346cd529-a9ea-4a3e-9ec4-a6a777d61103 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.001580] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 904.001580] env[63538]: value = "task-5101170" [ 904.001580] env[63538]: _type = "Task" [ 904.001580] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.012045] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101170, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.168956] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.284664] env[63538]: DEBUG nova.compute.manager [req-f8fc8eb2-d1a1-4ba8-8799-7af6aa34374d req-66b2879d-395b-4e94-ac38-06bc0f48f697 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Received event network-vif-plugged-cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 904.284985] env[63538]: DEBUG oslo_concurrency.lockutils [req-f8fc8eb2-d1a1-4ba8-8799-7af6aa34374d req-66b2879d-395b-4e94-ac38-06bc0f48f697 service nova] Acquiring lock "edcc5700-7b1e-494a-82d1-844373a9d5a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.285463] env[63538]: DEBUG oslo_concurrency.lockutils [req-f8fc8eb2-d1a1-4ba8-8799-7af6aa34374d req-66b2879d-395b-4e94-ac38-06bc0f48f697 service nova] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.285727] env[63538]: DEBUG oslo_concurrency.lockutils [req-f8fc8eb2-d1a1-4ba8-8799-7af6aa34374d req-66b2879d-395b-4e94-ac38-06bc0f48f697 service nova] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.285963] env[63538]: DEBUG nova.compute.manager [req-f8fc8eb2-d1a1-4ba8-8799-7af6aa34374d req-66b2879d-395b-4e94-ac38-06bc0f48f697 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] No waiting events found dispatching network-vif-plugged-cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 904.286216] env[63538]: WARNING nova.compute.manager [req-f8fc8eb2-d1a1-4ba8-8799-7af6aa34374d req-66b2879d-395b-4e94-ac38-06bc0f48f697 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Received unexpected event network-vif-plugged-cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 for instance with vm_state building and task_state spawning. [ 904.296079] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 904.296079] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5258c5bf-add1-3ccf-2625-7fe9568163fe" [ 904.296079] env[63538]: _type = "HttpNfcLease" [ 904.296079] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 904.296279] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 904.296279] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5258c5bf-add1-3ccf-2625-7fe9568163fe" [ 904.296279] env[63538]: _type = "HttpNfcLease" [ 904.296279] env[63538]: }. 
{{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 904.297202] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db983923-2b31-496f-ac79-a57f5188de01 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.308481] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cd8c0c-9dd6-d3b4-f85d-a19b54df3c66/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 904.308761] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cd8c0c-9dd6-d3b4-f85d-a19b54df3c66/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 904.368205] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4327ed6e-12bc-44c7-bd1b-8be81fe6d4af {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.383977] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101166, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.384309] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Doing hard reboot of VM {{(pid=63538) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1064}} [ 904.384573] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-f4b721d2-d376-4eda-861d-529bac47b67f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.392678] env[63538]: DEBUG oslo_vmware.api [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Waiting for the task: (returnval){ [ 904.392678] env[63538]: value = "task-5101171" [ 904.392678] env[63538]: _type = "Task" [ 904.392678] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.402363] env[63538]: DEBUG oslo_vmware.api [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101171, 'name': ResetVM_Task} progress is 0%. 
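The lease entries above (lease obtained, "is ready", VMDK URL found) belong to the stream-optimized image upload path: the VM is exported through an HttpNfcLease and the disk is then read over the URL the lease advertises. A rough sketch of that flow with oslo.vmware primitives, assuming a `session` and `vm_ref` as in the earlier power-off sketch; the deviceUrl handling is simplified:

from oslo_vmware import vim_util

lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
session.wait_for_lease_ready(lease)   # same polling seen as "_poll_lease" above

lease_info = session.invoke_api(vim_util, 'get_object_property',
                                session.vim, lease, 'info')
# Pick the first disk device URL advertised by the lease (simplified).
vmdk_url = next(dev.url for dev in lease_info.deviceUrl if dev.disk)
print(vmdk_url)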
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.426731] env[63538]: DEBUG nova.network.neutron [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Successfully updated port: cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 904.437309] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "8097cb1c-bbba-45a8-be81-64d38decb1df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.437552] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "8097cb1c-bbba-45a8-be81-64d38decb1df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.454889] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e3ba860b-afb8-4843-9d99-049dce205f9f] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 904.465927] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-71bf9503-9e5e-4fb0-9b26-4559340b3635 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.485875] env[63538]: DEBUG oslo_vmware.api [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101169, 'name': PowerOffVM_Task, 'duration_secs': 0.197125} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.488900] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 904.489128] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 904.489615] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8048789c-a022-416c-8272-4cabfb55c3a7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.519255] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101170, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.645159] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 904.645341] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 904.645440] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Deleting the datastore file [datastore2] edc670dd-732a-4c54-924c-c99ee539d4d9 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 904.645768] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e859da79-fdf9-4e61-8168-9f33bc8027e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.659107] env[63538]: DEBUG oslo_vmware.api [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 904.659107] env[63538]: value = "task-5101173" [ 904.659107] env[63538]: _type = "Task" [ 904.659107] env[63538]: } to complete. 
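The unregister-and-delete sequence above (UnregisterVM, then "Deleting the datastore file [datastore2] ...") maps onto two vSphere calls: VirtualMachine.UnregisterVM and FileManager.DeleteDatastoreFile_Task. A hedged sketch, reusing the `session`/`vm_ref` assumptions from the earlier power-off example; the datastore path and datacenter lookup are illustrative only:

from oslo_vmware import vim_util

# Unregister the VM, then delete its directory from the datastore.
session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

file_manager = session.vim.service_content.fileManager
dc_ref = session.invoke_api(vim_util, 'get_objects',
                            session.vim, 'Datacenter', 1).objects[0].obj
delete_task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] edc670dd-732a-4c54-924c-c99ee539d4d9',
    datacenter=dc_ref)
session.wait_for_task(delete_task)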
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.666512] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.671592] env[63538]: DEBUG oslo_vmware.api [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101173, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.763291] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.225s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.769953] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.259s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.771456] env[63538]: INFO nova.compute.claims [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 904.878991] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101166, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.903746] env[63538]: DEBUG oslo_vmware.api [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101171, 'name': ResetVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.929208] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.929446] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.929746] env[63538]: DEBUG nova.network.neutron [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 904.958203] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 0a7c34e0-1acc-4761-804a-eb9ee00fdd77] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 905.020696] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101170, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.118837] env[63538]: DEBUG nova.objects.instance [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'flavor' on Instance uuid d5d557c6-3d4e-4122-8756-218c9757fa01 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 905.167554] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.173312] env[63538]: DEBUG oslo_vmware.api [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101173, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.453056} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.173784] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 905.174109] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 905.174420] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 905.174798] env[63538]: INFO nova.compute.manager [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Took 1.23 seconds to destroy the instance on the hypervisor. [ 905.175254] env[63538]: DEBUG oslo.service.loopingcall [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 905.175605] env[63538]: DEBUG nova.compute.manager [-] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 905.175788] env[63538]: DEBUG nova.network.neutron [-] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 905.379913] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101166, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.669658} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.380322] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 0df15328-aebd-44c5-9c78-ee05f188ad95/0df15328-aebd-44c5-9c78-ee05f188ad95.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 905.381143] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 905.381143] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a0d9548a-4d38-4cae-9b03-4436418c3e6a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.390582] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 905.390582] env[63538]: value = "task-5101174" [ 905.390582] env[63538]: _type = "Task" [ 905.390582] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.407340] env[63538]: DEBUG oslo_vmware.api [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101171, 'name': ResetVM_Task, 'duration_secs': 0.752016} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.410318] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Did hard reboot of VM {{(pid=63538) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1068}} [ 905.410514] env[63538]: DEBUG nova.compute.manager [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 905.411134] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101174, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.411891] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b3eae5-fe3d-4c8f-9ff4-8b26c842bdd5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.461761] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: d99b7b8e-633f-4fba-bce6-9b8e9e9892d1] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 905.495635] env[63538]: DEBUG nova.network.neutron [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 905.519181] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101170, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.624913] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.625221] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquired lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.625507] env[63538]: DEBUG nova.network.neutron [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 905.625507] env[63538]: DEBUG nova.objects.instance [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'info_cache' on Instance uuid d5d557c6-3d4e-4122-8756-218c9757fa01 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 905.666087] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.675909] env[63538]: DEBUG nova.network.neutron [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Updating instance_info_cache with network_info: [{"id": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "address": "fa:16:3e:2d:31:f0", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff0fa7b-e0", "ovs_interfaceid": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.901661] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101174, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.130655} completed successfully. 
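The network_info blob above is the cached Neutron view of port cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189. Pulling the useful fields out of a structure shaped like that is plain dict access; the snippet below uses a hand-reduced literal copied from the log entry rather than the full cache object:

# Minimal, hand-reduced copy of the cached VIF entry shown above.
vif = {
    "id": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189",
    "address": "fa:16:3e:2d:31:f0",
    "devname": "tapcff0fa7b-e0",
    "network": {
        "label": "tempest-ServerRescueNegativeTestJSON-822672880-network",
        "subnets": [{"cidr": "192.168.128.0/28",
                     "ips": [{"address": "192.168.128.3"}]}],
    },
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
print(vif["address"], vif["devname"], fixed_ips)  # MAC, tap device, ['192.168.128.3']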
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.905373] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 905.906545] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7ee5f6-1f55-4e56-85bf-684657b6635b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.937650] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 0df15328-aebd-44c5-9c78-ee05f188ad95/0df15328-aebd-44c5-9c78-ee05f188ad95.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 905.942630] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4ea43fff-20f6-48dd-ab0d-882cf82133f4 tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.976s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.944367] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b79dc47e-86c7-47cf-a19b-08b0581f4adb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.966602] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e1710498-0616-4862-afc0-6e452dc19882] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 905.973154] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 905.973154] env[63538]: value = "task-5101175" [ 905.973154] env[63538]: _type = "Task" [ 905.973154] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.984759] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101175, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.985196] env[63538]: DEBUG nova.network.neutron [-] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.026287] env[63538]: DEBUG oslo_vmware.api [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101170, 'name': PowerOnVM_Task, 'duration_secs': 1.765661} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.027487] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 906.027592] env[63538]: INFO nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Took 10.60 seconds to spawn the instance on the hypervisor. [ 906.027753] env[63538]: DEBUG nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 906.028749] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e37d367-aa30-40ae-83e6-74b6658d5cda {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.129376] env[63538]: DEBUG nova.objects.base [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 906.168413] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.181802] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.182049] env[63538]: DEBUG nova.compute.manager [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Instance network_info: |[{"id": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "address": "fa:16:3e:2d:31:f0", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff0fa7b-e0", "ovs_interfaceid": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 906.183317] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:31:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ded8bac-871f-491b-94ec-cb67c08bc828', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 906.191850] env[63538]: DEBUG oslo.service.loopingcall [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.196417] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 906.196915] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51834a54-0d7b-4878-951d-eeb06bff310f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.223699] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 906.223699] env[63538]: value = "task-5101176" [ 906.223699] env[63538]: _type = "Task" [ 906.223699] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.237342] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101176, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.319850] env[63538]: INFO nova.compute.manager [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Swapping old allocation on dict_keys(['f65218a4-1d3d-476a-9093-01cae92c8635']) held by migration 0c630b5a-3695-4f8a-95d5-b51ed38cf5ce for instance [ 906.362283] env[63538]: DEBUG nova.scheduler.client.report [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Overwriting current allocation {'allocations': {'f65218a4-1d3d-476a-9093-01cae92c8635': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 102}}, 'project_id': '3dc18da1ea704eeaaeb62633c4f76ee8', 'user_id': '9c4dcefd65de48a582ffb683637bda94', 'consumer_generation': 1} on consumer a2e036ae-318b-44ea-9db0-10fa3838728b {{(pid=63538) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 906.383247] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3351f02f-77ad-4e11-b6d0-82bb9f8387d8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.390948] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2136ade-573d-468b-b4f3-02e5f9a1a2da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.402285] env[63538]: DEBUG nova.compute.manager [req-f32d4213-1109-406f-84af-bc451a51f6de req-96e73b0c-82ad-42f2-9c5c-d67bc9aa713a service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Received event network-changed-cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 906.402652] env[63538]: DEBUG nova.compute.manager [req-f32d4213-1109-406f-84af-bc451a51f6de req-96e73b0c-82ad-42f2-9c5c-d67bc9aa713a service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Refreshing instance network info cache due to event network-changed-cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 906.403105] env[63538]: DEBUG oslo_concurrency.lockutils [req-f32d4213-1109-406f-84af-bc451a51f6de req-96e73b0c-82ad-42f2-9c5c-d67bc9aa713a service nova] Acquiring lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.403296] env[63538]: DEBUG oslo_concurrency.lockutils [req-f32d4213-1109-406f-84af-bc451a51f6de req-96e73b0c-82ad-42f2-9c5c-d67bc9aa713a service nova] Acquired lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.403605] env[63538]: DEBUG nova.network.neutron [req-f32d4213-1109-406f-84af-bc451a51f6de req-96e73b0c-82ad-42f2-9c5c-d67bc9aa713a service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Refreshing network info cache for port cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 906.439094] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88136739-caf8-400b-9446-6d98209786e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.454043] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f878fa61-bca7-4198-b6bc-a84284029fc6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.471988] env[63538]: DEBUG nova.compute.provider_tree [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.474494] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 4b8fb9ad-a366-423d-81b1-04c5e4ec9264] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 906.488658] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101175, 'name': ReconfigVM_Task, 'duration_secs': 0.39615} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.489827] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 0df15328-aebd-44c5-9c78-ee05f188ad95/0df15328-aebd-44c5-9c78-ee05f188ad95.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 906.490714] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9bb67b35-97f2-443e-901b-d0a62685cd76 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.493145] env[63538]: INFO nova.compute.manager [-] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Took 1.32 seconds to deallocate network for instance. [ 906.505348] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 906.505348] env[63538]: value = "task-5101177" [ 906.505348] env[63538]: _type = "Task" [ 906.505348] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.509589] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.509876] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquired lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.510043] env[63538]: DEBUG nova.network.neutron [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 906.519542] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101177, 'name': Rename_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.560328] env[63538]: INFO nova.compute.manager [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Took 57.82 seconds to build instance. [ 906.673668] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.683443] env[63538]: DEBUG oslo_concurrency.lockutils [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquiring lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.683818] env[63538]: DEBUG oslo_concurrency.lockutils [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.684168] env[63538]: DEBUG oslo_concurrency.lockutils [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquiring lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.684452] env[63538]: DEBUG oslo_concurrency.lockutils [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.684722] env[63538]: DEBUG oslo_concurrency.lockutils [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.689535] env[63538]: INFO nova.compute.manager [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Terminating instance [ 906.694128] env[63538]: DEBUG nova.compute.manager [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 906.694431] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 906.696027] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adcbfa81-2e74-49e1-95a9-50d232ff643d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.704485] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 906.705640] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db17bb10-f844-479e-b0ed-eba1c585143b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.713675] env[63538]: DEBUG oslo_vmware.api [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Waiting for the task: (returnval){ [ 906.713675] env[63538]: value = "task-5101178" [ 906.713675] env[63538]: _type = "Task" [ 906.713675] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.723487] env[63538]: DEBUG oslo_vmware.api [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101178, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.736279] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101176, 'name': CreateVM_Task, 'duration_secs': 0.385904} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.736540] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 906.737362] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.737599] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.737990] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 906.738309] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa86ab29-2eec-4e53-b9de-1d3da559d559 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.744172] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 906.744172] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ec878d-4390-7a8c-6e9b-e45b89c5c86d" [ 906.744172] env[63538]: _type = "Task" [ 906.744172] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.754873] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ec878d-4390-7a8c-6e9b-e45b89c5c86d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.792516] env[63538]: DEBUG nova.network.neutron [req-f32d4213-1109-406f-84af-bc451a51f6de req-96e73b0c-82ad-42f2-9c5c-d67bc9aa713a service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Updated VIF entry in instance network info cache for port cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 906.793060] env[63538]: DEBUG nova.network.neutron [req-f32d4213-1109-406f-84af-bc451a51f6de req-96e73b0c-82ad-42f2-9c5c-d67bc9aa713a service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Updating instance_info_cache with network_info: [{"id": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "address": "fa:16:3e:2d:31:f0", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff0fa7b-e0", "ovs_interfaceid": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.976082] env[63538]: DEBUG nova.scheduler.client.report [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 906.981586] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 15a8424e-27a6-4b77-b57c-d163345b8fed] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 907.005471] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.018212] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101177, 'name': Rename_Task, 'duration_secs': 0.164714} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.019380] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 907.020022] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17d288c2-92d7-4893-acc6-2b7ac39017a4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.027692] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 907.027692] env[63538]: value = "task-5101179" [ 907.027692] env[63538]: _type = "Task" [ 907.027692] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.036448] env[63538]: DEBUG nova.network.neutron [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Updating instance_info_cache with network_info: [{"id": "0d48de93-8e4f-4795-a582-f00e76e60047", "address": "fa:16:3e:ca:bf:a1", "network": {"id": "955bc6e2-cea0-4cf9-80fb-521ae0e565f0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-726504029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9427981aac124f6aa0c4d8d45b0ae917", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c297fe21-cd0b-4226-813b-a65d2358d034", "external-id": "nsx-vlan-transportzone-98", "segmentation_id": 98, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d48de93-8e", "ovs_interfaceid": "0d48de93-8e4f-4795-a582-f00e76e60047", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.041206] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101179, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.063486] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a2d7809-7117-4d0d-8215-70fcb62ddad5 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "fa8ed101-914d-4751-ab9b-f68ad5da7a56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.012s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.168532] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.226502] env[63538]: DEBUG oslo_vmware.api [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101178, 'name': PowerOffVM_Task, 'duration_secs': 0.290598} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.226805] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 907.226911] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 907.227237] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea119f9e-4011-43d0-9924-1734e9cf034d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.255889] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ec878d-4390-7a8c-6e9b-e45b89c5c86d, 'name': SearchDatastore_Task, 'duration_secs': 0.013079} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.256297] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.256570] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 907.256891] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.257171] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.257415] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 907.257710] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af880ca1-9f9b-40ac-8e4d-bf75964c69e9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.269427] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 907.269660] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 907.270624] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b1c9a9d-e9eb-4840-a48d-afa7630d23c5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.277550] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 907.277550] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527482f3-2a91-4983-f7c5-7629416a0a88" [ 907.277550] env[63538]: _type = "Task" [ 907.277550] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.287746] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527482f3-2a91-4983-f7c5-7629416a0a88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.293174] env[63538]: DEBUG nova.network.neutron [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance_info_cache with network_info: [{"id": "11d7dbc5-d269-456b-9a7a-601759e64b51", "address": "fa:16:3e:45:35:dd", "network": {"id": "2d896d2c-3648-492b-9a02-0d84a652c424", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.81", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "589e84f0d68d4127baed4a6b24d18503", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11d7dbc5-d2", "ovs_interfaceid": "11d7dbc5-d269-456b-9a7a-601759e64b51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.298065] env[63538]: DEBUG oslo_concurrency.lockutils [req-f32d4213-1109-406f-84af-bc451a51f6de req-96e73b0c-82ad-42f2-9c5c-d67bc9aa713a service nova] Releasing lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.298518] env[63538]: DEBUG nova.compute.manager [req-f32d4213-1109-406f-84af-bc451a51f6de req-96e73b0c-82ad-42f2-9c5c-d67bc9aa713a service nova] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Received event network-vif-deleted-39d5fcb4-d49b-4357-9446-9420f5d3407d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 
907.301836] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 907.302120] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 907.302358] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Deleting the datastore file [datastore1] b0b4ae9c-95d3-47a1-86a7-120c88b60704 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.302668] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-baddb2d5-a33a-46e9-a52c-a5101bc626cb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.311297] env[63538]: DEBUG oslo_vmware.api [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Waiting for the task: (returnval){ [ 907.311297] env[63538]: value = "task-5101181" [ 907.311297] env[63538]: _type = "Task" [ 907.311297] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.321355] env[63538]: DEBUG oslo_vmware.api [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101181, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.386515] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "1db1d558-2473-49cb-b309-f7192bd6b9c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.386515] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "1db1d558-2473-49cb-b309-f7192bd6b9c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.386716] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "1db1d558-2473-49cb-b309-f7192bd6b9c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.387326] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "1db1d558-2473-49cb-b309-f7192bd6b9c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.387326] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "1db1d558-2473-49cb-b309-f7192bd6b9c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.389338] env[63538]: INFO nova.compute.manager [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Terminating instance [ 907.394208] env[63538]: DEBUG nova.compute.manager [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 907.395052] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 907.396122] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59de57fc-6149-4a60-8711-ed4da6e60b35 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.406785] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 907.406785] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70c1dafc-33ed-42cb-86e1-176675c8e496 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.415267] env[63538]: DEBUG oslo_vmware.api [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 907.415267] env[63538]: value = "task-5101182" [ 907.415267] env[63538]: _type = "Task" [ 907.415267] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.427746] env[63538]: DEBUG oslo_vmware.api [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101182, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.481377] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.711s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.482103] env[63538]: DEBUG nova.compute.manager [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 907.486981] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.672s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.489278] env[63538]: INFO nova.compute.claims [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.494313] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 174368d1-9910-495b-a923-842e0440fd01] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 907.540952] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101179, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.542798] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Releasing lock "refresh_cache-d5d557c6-3d4e-4122-8756-218c9757fa01" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.566443] env[63538]: DEBUG nova.compute.manager [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 907.676239] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.691098] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "fa8ed101-914d-4751-ab9b-f68ad5da7a56" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.691420] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "fa8ed101-914d-4751-ab9b-f68ad5da7a56" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.691649] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "fa8ed101-914d-4751-ab9b-f68ad5da7a56-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.692251] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "fa8ed101-914d-4751-ab9b-f68ad5da7a56-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.692251] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "fa8ed101-914d-4751-ab9b-f68ad5da7a56-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.694562] env[63538]: INFO nova.compute.manager [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Terminating instance [ 907.696930] env[63538]: DEBUG nova.compute.manager [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 907.697238] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 907.698293] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af580269-a158-4d1c-92a7-019e91166f82 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.706551] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 907.708187] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e544e55a-1a7d-453c-aa28-c30a34de918f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.714198] env[63538]: DEBUG oslo_vmware.api [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 907.714198] env[63538]: value = "task-5101183" [ 907.714198] env[63538]: _type = "Task" [ 907.714198] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.723138] env[63538]: DEBUG oslo_vmware.api [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101183, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.789234] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527482f3-2a91-4983-f7c5-7629416a0a88, 'name': SearchDatastore_Task, 'duration_secs': 0.014827} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.790194] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2aef54f-84d6-4417-94e6-bb63152bd9cf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.796463] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 907.796463] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52881417-15c3-1361-6fcb-f7be2b42d61e" [ 907.796463] env[63538]: _type = "Task" [ 907.796463] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.796965] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Releasing lock "refresh_cache-a2e036ae-318b-44ea-9db0-10fa3838728b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.797430] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 907.797763] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e4e5253-7ba7-45a1-bc5d-1b945be12f08 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.810030] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52881417-15c3-1361-6fcb-f7be2b42d61e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.811629] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 907.811629] env[63538]: value = "task-5101184" [ 907.811629] env[63538]: _type = "Task" [ 907.811629] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.824660] env[63538]: DEBUG oslo_vmware.api [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Task: {'id': task-5101181, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.270931} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.828494] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.828833] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 907.829063] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 907.829302] env[63538]: INFO nova.compute.manager [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Took 1.13 seconds to destroy the instance on the hypervisor. [ 907.829636] env[63538]: DEBUG oslo.service.loopingcall [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 907.829898] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101184, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.830157] env[63538]: DEBUG nova.compute.manager [-] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 907.830258] env[63538]: DEBUG nova.network.neutron [-] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 907.927420] env[63538]: DEBUG oslo_vmware.api [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101182, 'name': PowerOffVM_Task, 'duration_secs': 0.222151} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.927814] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 907.928101] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 907.928449] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ebd24096-6a8a-4cb5-9d15-d31fce9342fa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.001891] env[63538]: DEBUG nova.compute.utils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 908.004321] env[63538]: DEBUG nova.compute.manager [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 908.004321] env[63538]: DEBUG nova.network.neutron [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 908.006440] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.006610] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Cleaning up deleted instances with incomplete migration {{(pid=63538) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11355}} [ 908.011049] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 908.011304] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 908.011395] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 
tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Deleting the datastore file [datastore2] 1db1d558-2473-49cb-b309-f7192bd6b9c1 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.011688] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca907da5-4e77-4292-833f-53f6c63a9cd1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.020821] env[63538]: DEBUG oslo_vmware.api [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 908.020821] env[63538]: value = "task-5101186" [ 908.020821] env[63538]: _type = "Task" [ 908.020821] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.032900] env[63538]: DEBUG oslo_vmware.api [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101186, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.043755] env[63538]: DEBUG oslo_vmware.api [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101179, 'name': PowerOnVM_Task, 'duration_secs': 0.531039} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.044130] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 908.044361] env[63538]: INFO nova.compute.manager [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Took 9.85 seconds to spawn the instance on the hypervisor. 
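The entries above and below repeat the driver's core invoke-and-poll pattern: a vSphere method such as PowerOnVM_Task, DeleteDatastoreFile_Task or SearchDatastore_Task is invoked through the oslo.vmware session, the call returns a task reference, and wait_for_task polls it until the "progress is N% ... completed successfully" records appear. A minimal sketch of that pattern, written directly against oslo.vmware rather than through Nova's vmwareapi session wrapper, is given below; the vCenter host, credentials, retry/poll settings and the VM managed-object id are illustrative placeholders, not values taken from this log, and the constructor argument order is an assumption about the library rather than something this log confirms.

# Illustrative sketch only: reproduces the invoke-and-poll pattern seen in the
# surrounding log records using oslo.vmware directly. The host, credentials and
# managed-object id below are hypothetical placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.org',             # vCenter host (placeholder)
    'administrator@vsphere.local',     # user name (placeholder)
    'secret',                          # password (placeholder)
    10,                                # API retry count (assumed argument order)
    0.5)                               # task poll interval, seconds (assumed)

# Build a managed-object reference for an existing VM (placeholder id).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Kick off the asynchronous vSphere call; this mirrors the
# "Invoking VirtualMachine.PowerOnVM_Task" records above and returns a task.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# Poll the task to completion, as the "_poll_task ... progress is N%" and
# "completed successfully" records show oslo_vmware.api doing.
session.wait_for_task(task)

The "Acquiring lock ... / Lock ... acquired ... waited 0.000s / ... released ... held ..." pairs that bracket these operations come from oslo.concurrency's lockutils, which Nova uses as a context manager or decorator to serialise work on a given instance, image-cache path or the resource tracker.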
[ 908.044546] env[63538]: DEBUG nova.compute.manager [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 908.045515] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 908.046326] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf904c26-d617-4732-b7db-1c9dd23839a6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.050154] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c722d83-a1c0-4508-a144-d39f08d3be36 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.065100] env[63538]: DEBUG nova.policy [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '734813d3e57a4a3cb92c6bd8321ea5af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f1fac1e0f624a0382b8b73720fb4c7c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 908.068689] env[63538]: DEBUG oslo_vmware.api [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 908.068689] env[63538]: value = "task-5101187" [ 908.068689] env[63538]: _type = "Task" [ 908.068689] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.086837] env[63538]: DEBUG oslo_vmware.api [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101187, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.103047] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.171791] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.228620] env[63538]: DEBUG oslo_vmware.api [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101183, 'name': PowerOffVM_Task, 'duration_secs': 0.241927} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.229300] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 908.230114] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 908.230288] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94687743-6324-41bb-bfba-546934406ba6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.259057] env[63538]: DEBUG nova.compute.manager [req-a8eb47a6-0ae7-49e8-b40b-bb2cc827852f req-4897c536-5952-4747-8f60-1460facb4f5f service nova] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Received event network-vif-deleted-2dc178e5-2d66-4747-ae40-c03f69eba8e8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 908.259057] env[63538]: INFO nova.compute.manager [req-a8eb47a6-0ae7-49e8-b40b-bb2cc827852f req-4897c536-5952-4747-8f60-1460facb4f5f service nova] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Neutron deleted interface 2dc178e5-2d66-4747-ae40-c03f69eba8e8; detaching it from the instance and deleting it from the info cache [ 908.259057] env[63538]: DEBUG nova.network.neutron [req-a8eb47a6-0ae7-49e8-b40b-bb2cc827852f req-4897c536-5952-4747-8f60-1460facb4f5f service nova] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.311612] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52881417-15c3-1361-6fcb-f7be2b42d61e, 'name': SearchDatastore_Task, 'duration_secs': 0.020222} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.311968] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.312576] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] edcc5700-7b1e-494a-82d1-844373a9d5a6/edcc5700-7b1e-494a-82d1-844373a9d5a6.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 908.313276] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5adf41de-dca5-4515-bf34-74024694f2cb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.326525] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 908.327347] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 908.327734] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Deleting the datastore file [datastore2] fa8ed101-914d-4751-ab9b-f68ad5da7a56 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.329147] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-853306e2-1d6c-4b75-b637-d1840ad0af20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.338579] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101184, 'name': PowerOffVM_Task, 'duration_secs': 0.281512} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.338937] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 908.338937] env[63538]: value = "task-5101189" [ 908.338937] env[63538]: _type = "Task" [ 908.338937] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.339782] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 908.340714] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:52:22Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='2a0f5711-293c-4327-a7c3-091f85550bf8',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-805814405',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 908.341049] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 908.341315] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 908.341583] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 908.341797] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 908.342033] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 908.342350] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 908.342813] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 
tempest-MigrationsAdminTest-713495968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 908.343080] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 908.343348] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 908.343599] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 908.352585] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd624dc6-4e8f-47f0-9e21-a7e5b62a2421 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.368468] env[63538]: DEBUG oslo_vmware.api [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for the task: (returnval){ [ 908.368468] env[63538]: value = "task-5101190" [ 908.368468] env[63538]: _type = "Task" [ 908.368468] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.376705] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101189, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.378923] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 908.378923] env[63538]: value = "task-5101191" [ 908.378923] env[63538]: _type = "Task" [ 908.378923] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.386557] env[63538]: DEBUG oslo_vmware.api [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101190, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.391294] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101191, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.510387] env[63538]: DEBUG nova.compute.manager [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 908.515032] env[63538]: DEBUG nova.network.neutron [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Successfully created port: 3a12b458-0ee6-4994-89a1-0b04d9f01da3 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 908.517528] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.536689] env[63538]: DEBUG oslo_vmware.api [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101186, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303102} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.540636] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.540991] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 908.541238] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 908.541602] env[63538]: INFO nova.compute.manager [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Took 1.15 seconds to destroy the instance on the hypervisor. [ 908.541705] env[63538]: DEBUG oslo.service.loopingcall [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.543679] env[63538]: DEBUG nova.compute.manager [-] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 908.543679] env[63538]: DEBUG nova.network.neutron [-] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 908.590560] env[63538]: INFO nova.compute.manager [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Took 50.61 seconds to build instance. [ 908.602031] env[63538]: DEBUG oslo_vmware.api [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101187, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.673509] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.705553] env[63538]: DEBUG nova.network.neutron [-] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.760670] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9eaa9a3b-774d-4982-8446-3c16135fb32d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.774045] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af216ea-bea8-468c-ac4c-ed32d6030fa3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.823603] env[63538]: DEBUG nova.compute.manager [req-a8eb47a6-0ae7-49e8-b40b-bb2cc827852f req-4897c536-5952-4747-8f60-1460facb4f5f service nova] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Detach interface failed, port_id=2dc178e5-2d66-4747-ae40-c03f69eba8e8, reason: Instance b0b4ae9c-95d3-47a1-86a7-120c88b60704 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 908.866534] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101189, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.883147] env[63538]: DEBUG oslo_vmware.api [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Task: {'id': task-5101190, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.315446} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.887443] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.887735] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 908.887942] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 908.888140] env[63538]: INFO nova.compute.manager [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Took 1.19 seconds to destroy the instance on the hypervisor. [ 908.888405] env[63538]: DEBUG oslo.service.loopingcall [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.892619] env[63538]: DEBUG nova.compute.manager [-] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 908.892822] env[63538]: DEBUG nova.network.neutron [-] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 908.907039] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.087779] env[63538]: DEBUG oslo_vmware.api [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101187, 'name': PowerOnVM_Task, 'duration_secs': 0.54811} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.091522] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 909.091522] env[63538]: DEBUG nova.compute.manager [None req-e1311821-f6df-4409-a7fc-507e335adeda tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 909.092334] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed9ae69-3621-40e2-a5dd-980f24ef0ef9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.095249] env[63538]: DEBUG oslo_concurrency.lockutils [None req-63291246-b1a0-4562-b6b0-7489dcd3f183 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.096s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.134766] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295ba9f1-a0b8-4fc3-b525-2c64ffebf9b8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.145220] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390a03e7-8cab-4599-ba1a-f1abf3407268 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.182992] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94962c6e-756f-4bcd-9b91-3908dbb1a8b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.191646] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.195094] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3e2638-3170-4c1a-b483-3be831cb204d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.211907] env[63538]: INFO nova.compute.manager [-] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Took 1.38 seconds to deallocate network for instance. 
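The entries above trace the oslo.vmware task lifecycle end to end: a vSphere call such as PowerOffVM_Task, CopyVirtualDisk_Task or DeleteDatastoreFile_Task returns a task reference, and the driver then polls it ("Task: {...} progress is N%") until the poller reports "completed successfully" together with a duration_secs. The sketch below is a minimal, hypothetical re-implementation of that polling pattern for illustration only; TaskInfo and the get_task_info callable are assumed stand-ins and do not reproduce oslo.vmware's actual wait_for_task/_poll_task code.

import time
from dataclasses import dataclass

# Hypothetical snapshot of a vSphere task, mirroring the fields that show up
# in the log above ('id', 'name', progress, completion state, duration).
@dataclass
class TaskInfo:
    task_id: str
    name: str
    state: str            # "running" | "success" | "error"
    progress: int         # percent complete, as in "progress is 25%"
    error: str | None = None

def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
    """Poll a task until it finishes, printing the same progress/success
    messages that appear in the log (illustrative only)."""
    started = time.monotonic()
    while True:
        info = get_task_info(task_id)          # assumed callable
        if info.state == "success":
            duration = time.monotonic() - started
            print(f"Task {{'id': {info.task_id!r}, 'name': {info.name!r}, "
                  f"'duration_secs': {duration:.6f}}} completed successfully.")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {info.task_id} failed: {info.error}")
        print(f"Task {{'id': {info.task_id!r}, 'name': {info.name!r}}} "
              f"progress is {info.progress}%.")
        if time.monotonic() - started > timeout:
            raise TimeoutError(f"Task {info.task_id} did not complete in time")
        time.sleep(poll_interval)

The same loop shape explains why a long-running operation such as task-5101162 (ReconfigVM_Task, roughly 7.7 s) shows several "progress is 99%" lines before its final success entry.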
[ 909.212490] env[63538]: DEBUG nova.compute.provider_tree [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 909.363000] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101189, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.738862} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.363253] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] edcc5700-7b1e-494a-82d1-844373a9d5a6/edcc5700-7b1e-494a-82d1-844373a9d5a6.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 909.364117] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 909.364117] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d0da9ef-cd9c-4c7b-ba8e-db423c1fc13e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.372022] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 909.372022] env[63538]: value = "task-5101192" [ 909.372022] env[63538]: _type = "Task" [ 909.372022] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.383104] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101192, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.392896] env[63538]: DEBUG nova.network.neutron [-] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.394302] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101191, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.527511] env[63538]: DEBUG nova.compute.manager [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 909.554870] env[63538]: DEBUG nova.virt.hardware [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 909.555310] env[63538]: DEBUG nova.virt.hardware [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 909.555688] env[63538]: DEBUG nova.virt.hardware [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 909.555812] env[63538]: DEBUG nova.virt.hardware [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 909.555977] env[63538]: DEBUG nova.virt.hardware [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 909.556207] env[63538]: DEBUG nova.virt.hardware [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 909.556444] env[63538]: DEBUG nova.virt.hardware [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 909.556612] env[63538]: DEBUG nova.virt.hardware [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 909.556848] env[63538]: DEBUG nova.virt.hardware [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 909.556958] env[63538]: DEBUG nova.virt.hardware [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 909.557260] env[63538]: DEBUG nova.virt.hardware [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 909.558216] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6eccec-901d-4ca2-8275-61920237b7eb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.568505] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4d3c16-8881-41e8-a25f-d6be9d659397 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.598531] env[63538]: DEBUG nova.compute.manager [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 909.690169] env[63538]: DEBUG nova.network.neutron [-] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.694331] env[63538]: DEBUG oslo_vmware.api [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101162, 'name': ReconfigVM_Task, 'duration_secs': 7.723486} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.694331] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.694331] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Reconfigured VM to detach interface {{(pid=63538) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 909.716334] env[63538]: DEBUG nova.scheduler.client.report [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 909.722462] env[63538]: DEBUG oslo_concurrency.lockutils [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.882023] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101192, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.180282} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.883219] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 909.884299] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1235348-a457-44b6-bb7c-5df167f1a86a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.903017] env[63538]: INFO nova.compute.manager [-] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Took 1.36 seconds to deallocate network for instance. 
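The "Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635" entry above carries the resource-provider inventory that the scheduler and the resource tracker work from. Schedulable capacity per resource class follows the standard Placement formula capacity = (total - reserved) * allocation_ratio, with max_unit additionally capping any single allocation; the short sketch below simply reruns that arithmetic on the figures logged above and is illustrative only.

# Recompute schedulable capacity from the inventory logged above, using the
# Placement formula: capacity = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 94},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity {capacity:g}, at most {inv['max_unit']} per allocation")
# VCPU: capacity 192, MEMORY_MB: capacity 196078, DISK_GB: capacity 200

The 192 MB / 1 vCPU / 1 GB flavors used throughout this run fit comfortably inside those figures, which is why the resource claims in this run succeed and mostly just queue on the compute_resources lock.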
[ 909.912870] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] edcc5700-7b1e-494a-82d1-844373a9d5a6/edcc5700-7b1e-494a-82d1-844373a9d5a6.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 909.920318] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f08213fe-d414-43ca-bda5-6bd1ece33f64 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.942504] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101191, 'name': ReconfigVM_Task, 'duration_secs': 1.195174} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.948170] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bacf65e9-5531-469c-8cfd-f1767058e72b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.954759] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 909.954759] env[63538]: value = "task-5101193" [ 909.954759] env[63538]: _type = "Task" [ 909.954759] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.975676] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:52:22Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='2a0f5711-293c-4327-a7c3-091f85550bf8',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-805814405',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 909.975970] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 909.976185] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 909.976394] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 909.976550] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 909.976707] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 909.976931] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 909.977176] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 909.977450] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Got 1 possible 
topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 909.977660] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 909.977880] env[63538]: DEBUG nova.virt.hardware [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 909.979352] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9604b64-8b79-4315-a794-c64056c7221b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.988988] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101193, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.993364] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 909.993364] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528e708e-c6bb-02ff-525f-e36656947295" [ 909.993364] env[63538]: _type = "Task" [ 909.993364] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.003847] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528e708e-c6bb-02ff-525f-e36656947295, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.132987] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.193392] env[63538]: INFO nova.compute.manager [-] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Took 1.30 seconds to deallocate network for instance. 
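The nova.virt.hardware entries above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") reflect Nova enumerating every sockets/cores/threads split of the flavor's vCPU count that fits inside the logged maxima (65536 per dimension here, since neither flavor nor image constrains the topology). The sketch below illustrates that enumeration under those assumptions; it is not Nova's actual _get_possible_cpu_topologies implementation, and VirtCPUTopology is a stand-in namedtuple rather than the real Nova object.

from collections import namedtuple

# Illustrative stand-in for nova.objects.VirtCPUTopology.
VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate every sockets*cores*threads factorisation of vcpus that
    stays within the per-dimension maxima (sketch only)."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

print(possible_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]

With a single vCPU there is only one factorisation, so the "Sorted desired topologies" entry above is trivially that same single entry.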
[ 910.225302] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.738s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.225394] env[63538]: DEBUG nova.compute.manager [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 910.231041] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.990s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.234267] env[63538]: INFO nova.compute.claims [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 910.238172] env[63538]: DEBUG nova.compute.manager [req-e834d3c0-9874-48a4-846c-d7882fab8180 req-9c925075-5f86-4149-81aa-120d9dcfff12 service nova] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Received event network-vif-plugged-3a12b458-0ee6-4994-89a1-0b04d9f01da3 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 910.238397] env[63538]: DEBUG oslo_concurrency.lockutils [req-e834d3c0-9874-48a4-846c-d7882fab8180 req-9c925075-5f86-4149-81aa-120d9dcfff12 service nova] Acquiring lock "f1838794-710c-4bea-9e73-f6912e1b69f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.238868] env[63538]: DEBUG oslo_concurrency.lockutils [req-e834d3c0-9874-48a4-846c-d7882fab8180 req-9c925075-5f86-4149-81aa-120d9dcfff12 service nova] Lock "f1838794-710c-4bea-9e73-f6912e1b69f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.238868] env[63538]: DEBUG oslo_concurrency.lockutils [req-e834d3c0-9874-48a4-846c-d7882fab8180 req-9c925075-5f86-4149-81aa-120d9dcfff12 service nova] Lock "f1838794-710c-4bea-9e73-f6912e1b69f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.238933] env[63538]: DEBUG nova.compute.manager [req-e834d3c0-9874-48a4-846c-d7882fab8180 req-9c925075-5f86-4149-81aa-120d9dcfff12 service nova] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] No waiting events found dispatching network-vif-plugged-3a12b458-0ee6-4994-89a1-0b04d9f01da3 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 910.239463] env[63538]: WARNING 
nova.compute.manager [req-e834d3c0-9874-48a4-846c-d7882fab8180 req-9c925075-5f86-4149-81aa-120d9dcfff12 service nova] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Received unexpected event network-vif-plugged-3a12b458-0ee6-4994-89a1-0b04d9f01da3 for instance with vm_state building and task_state spawning. [ 910.298538] env[63538]: DEBUG nova.compute.manager [req-8a46b90c-4611-493b-ab9d-7b674e114674 req-2674afc0-6940-4d37-a471-d5f63f254994 service nova] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Received event network-vif-deleted-ed76faed-4b61-4cd6-833c-46bbb80f49e3 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 910.298774] env[63538]: DEBUG nova.compute.manager [req-8a46b90c-4611-493b-ab9d-7b674e114674 req-2674afc0-6940-4d37-a471-d5f63f254994 service nova] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Received event network-vif-deleted-e650c787-b98c-4e3d-aa14-1a81d82c2c8b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 910.298989] env[63538]: DEBUG nova.compute.manager [req-8a46b90c-4611-493b-ab9d-7b674e114674 req-2674afc0-6940-4d37-a471-d5f63f254994 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Received event network-vif-deleted-4c96df2c-ae17-49fe-84c1-d86dd4b46eb6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 910.299408] env[63538]: INFO nova.compute.manager [req-8a46b90c-4611-493b-ab9d-7b674e114674 req-2674afc0-6940-4d37-a471-d5f63f254994 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Neutron deleted interface 4c96df2c-ae17-49fe-84c1-d86dd4b46eb6; detaching it from the instance and deleting it from the info cache [ 910.299581] env[63538]: DEBUG nova.network.neutron [req-8a46b90c-4611-493b-ab9d-7b674e114674 req-2674afc0-6940-4d37-a471-d5f63f254994 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Updating instance_info_cache with network_info: [{"id": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "address": "fa:16:3e:60:55:b3", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap696fc25d-fa", "ovs_interfaceid": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.382043] env[63538]: DEBUG nova.network.neutron [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: 
f1838794-710c-4bea-9e73-f6912e1b69f5] Successfully updated port: 3a12b458-0ee6-4994-89a1-0b04d9f01da3 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 910.454289] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.466984] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101193, 'name': ReconfigVM_Task, 'duration_secs': 0.429479} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.467344] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Reconfigured VM instance instance-00000048 to attach disk [datastore2] edcc5700-7b1e-494a-82d1-844373a9d5a6/edcc5700-7b1e-494a-82d1-844373a9d5a6.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 910.468111] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9fdc99f3-c58e-43d4-a565-fa8ebc6dc92c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.476011] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 910.476011] env[63538]: value = "task-5101194" [ 910.476011] env[63538]: _type = "Task" [ 910.476011] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.486490] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101194, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.503216] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528e708e-c6bb-02ff-525f-e36656947295, 'name': SearchDatastore_Task, 'duration_secs': 0.010112} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.509628] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Reconfiguring VM instance instance-00000033 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 910.510060] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15b255db-6781-49ed-b954-8b8e3f64be88 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.533865] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 910.533865] env[63538]: value = "task-5101195" [ 910.533865] env[63538]: _type = "Task" [ 910.533865] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.544086] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101195, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.703212] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.731325] env[63538]: DEBUG nova.compute.utils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 910.732704] env[63538]: DEBUG nova.compute.manager [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 910.732890] env[63538]: DEBUG nova.network.neutron [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 910.802595] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a46b90c-4611-493b-ab9d-7b674e114674 req-2674afc0-6940-4d37-a471-d5f63f254994 service nova] Acquiring lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.803134] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a46b90c-4611-493b-ab9d-7b674e114674 req-2674afc0-6940-4d37-a471-d5f63f254994 service nova] Acquired lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.804268] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4145b6ce-9c1f-4206-b768-ea4a1bcf02a4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.826810] env[63538]: DEBUG oslo_concurrency.lockutils [req-8a46b90c-4611-493b-ab9d-7b674e114674 req-2674afc0-6940-4d37-a471-d5f63f254994 service nova] Releasing lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.829019] env[63538]: WARNING nova.compute.manager [req-8a46b90c-4611-493b-ab9d-7b674e114674 req-2674afc0-6940-4d37-a471-d5f63f254994 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Detach interface failed, port_id=4c96df2c-ae17-49fe-84c1-d86dd4b46eb6, reason: No device with interface-id 4c96df2c-ae17-49fe-84c1-d86dd4b46eb6 exists on VM: nova.exception.NotFound: No device with interface-id 4c96df2c-ae17-49fe-84c1-d86dd4b46eb6 exists on VM [ 910.829828] env[63538]: DEBUG nova.policy [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27848be71c3644b8920dd4c0fa39c23f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b78ab12ed0254cf4b3ccb7c231ca810d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 910.887425] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Acquiring lock "refresh_cache-f1838794-710c-4bea-9e73-f6912e1b69f5" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.887425] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Acquired lock "refresh_cache-f1838794-710c-4bea-9e73-f6912e1b69f5" 
{{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.887425] env[63538]: DEBUG nova.network.neutron [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 910.989316] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101194, 'name': Rename_Task, 'duration_secs': 0.24329} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.989658] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 910.990089] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-876e1e1f-86d9-4f0f-8653-db84e26df6a1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.999020] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 910.999020] env[63538]: value = "task-5101196" [ 910.999020] env[63538]: _type = "Task" [ 910.999020] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.011055] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101196, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.044878] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101195, 'name': ReconfigVM_Task, 'duration_secs': 0.268032} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.045798] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Reconfigured VM instance instance-00000033 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 911.046521] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd728bba-96f5-4f4b-802c-2d81c1fb339d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.073249] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] a2e036ae-318b-44ea-9db0-10fa3838728b/a2e036ae-318b-44ea-9db0-10fa3838728b.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 911.073613] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18e7f676-b1fb-4d4a-a1e1-40a22c55bb8d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.096827] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 911.096827] env[63538]: value = "task-5101197" [ 911.096827] env[63538]: _type = "Task" [ 911.096827] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.112103] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101197, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.140705] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.141049] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.142363] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.142363] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.142363] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.145058] env[63538]: INFO nova.compute.manager [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Terminating instance [ 911.151153] env[63538]: DEBUG nova.compute.manager [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 911.151153] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 911.151153] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921219fc-d97e-42e8-a42b-71fd4cc83bb2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.161853] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 911.161853] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90b0cc04-0861-4d4a-a071-38337a952d45 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.171824] env[63538]: DEBUG oslo_vmware.api [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 911.171824] env[63538]: value = "task-5101198" [ 911.171824] env[63538]: _type = "Task" [ 911.171824] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.183656] env[63538]: DEBUG oslo_vmware.api [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101198, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.242033] env[63538]: DEBUG nova.compute.manager [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 911.248518] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.248948] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.249932] env[63538]: DEBUG nova.network.neutron [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 911.334587] env[63538]: DEBUG nova.network.neutron [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Successfully created port: 312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 911.429958] env[63538]: DEBUG nova.network.neutron [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 911.511202] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101196, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.601764] env[63538]: DEBUG nova.network.neutron [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Updating instance_info_cache with network_info: [{"id": "3a12b458-0ee6-4994-89a1-0b04d9f01da3", "address": "fa:16:3e:52:e3:c2", "network": {"id": "676fedce-e24d-48c7-8075-6d3a8ece6c03", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1864286401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1fac1e0f624a0382b8b73720fb4c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a12b458-0e", "ovs_interfaceid": "3a12b458-0ee6-4994-89a1-0b04d9f01da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.618511] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101197, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.682847] env[63538]: DEBUG oslo_vmware.api [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101198, 'name': PowerOffVM_Task, 'duration_secs': 0.238255} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.683022] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 911.683281] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 911.683456] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b003cf7-cb99-4e17-8f1e-dabd2cb27f8d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.760904] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 911.760904] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 911.760904] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Deleting the datastore file [datastore2] d967631f-5c8a-42d8-ac05-4cec3bdb55cf {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 911.761208] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce1e21b6-3158-4e46-b24e-4250399abae3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.769688] env[63538]: DEBUG oslo_vmware.api [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 911.769688] env[63538]: value = "task-5101200" [ 911.769688] env[63538]: _type = "Task" [ 911.769688] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.781740] env[63538]: DEBUG oslo_vmware.api [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101200, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.830593] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb564fc3-5ebd-4c2c-9876-9dd4f3b96000 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.840384] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e652633c-e7f9-44bf-b7a6-72bc232d8153 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.876933] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96269fa4-85b5-46db-8bd0-35dfd72d78a3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.886476] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865fb094-069d-4efe-8abf-ba4c5db583f5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.900383] env[63538]: DEBUG nova.compute.provider_tree [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.010544] env[63538]: DEBUG oslo_vmware.api [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101196, 'name': PowerOnVM_Task, 'duration_secs': 0.877549} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.010927] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 912.011183] env[63538]: INFO nova.compute.manager [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Took 8.45 seconds to spawn the instance on the hypervisor. 
[ 912.011375] env[63538]: DEBUG nova.compute.manager [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 912.012238] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8284d7-3f87-44b7-bf90-ef5f6fe66f75 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.108149] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Releasing lock "refresh_cache-f1838794-710c-4bea-9e73-f6912e1b69f5" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.108624] env[63538]: DEBUG nova.compute.manager [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Instance network_info: |[{"id": "3a12b458-0ee6-4994-89a1-0b04d9f01da3", "address": "fa:16:3e:52:e3:c2", "network": {"id": "676fedce-e24d-48c7-8075-6d3a8ece6c03", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1864286401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1fac1e0f624a0382b8b73720fb4c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a12b458-0e", "ovs_interfaceid": "3a12b458-0ee6-4994-89a1-0b04d9f01da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 912.112681] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:e3:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a12b458-0ee6-4994-89a1-0b04d9f01da3', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 912.120883] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Creating folder: Project (7f1fac1e0f624a0382b8b73720fb4c7c). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 912.121273] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101197, 'name': ReconfigVM_Task, 'duration_secs': 0.575761} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.121505] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82e84580-f692-40e3-96bb-73c2da692dff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.123760] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Reconfigured VM instance instance-00000033 to attach disk [datastore1] a2e036ae-318b-44ea-9db0-10fa3838728b/a2e036ae-318b-44ea-9db0-10fa3838728b.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 912.124708] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a496e18a-615f-442c-80c0-48927d2d3959 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.147640] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2922cd72-ca74-4c23-bd07-93e114f0f384 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.150765] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Created folder: Project (7f1fac1e0f624a0382b8b73720fb4c7c) in parent group-v992234. [ 912.150955] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Creating folder: Instances. Parent ref: group-v992434. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 912.151454] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d6fcebeb-bedc-4a1a-8b5a-4b3bd7d568e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.171971] env[63538]: DEBUG nova.network.neutron [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Updating instance_info_cache with network_info: [{"id": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "address": "fa:16:3e:60:55:b3", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap696fc25d-fa", "ovs_interfaceid": "696fc25d-fa83-4793-bffa-6bd2ce56f489", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.175039] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2affda60-a318-4c73-aabf-e643d54c3729 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.179156] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Created folder: Instances in parent group-v992434. [ 912.179156] env[63538]: DEBUG oslo.service.loopingcall [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.179633] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 912.180341] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51935024-1fda-4480-a6e1-2e0661d37658 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.215152] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc2fc75-7557-4d31-ba62-2b7cc8c2378b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.220867] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 912.220867] env[63538]: value = "task-5101203" [ 912.220867] env[63538]: _type = "Task" [ 912.220867] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.229249] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 912.230393] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2406844f-6c61-473e-a12f-b1a8fcd35562 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.236471] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101203, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.243557] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 912.243557] env[63538]: value = "task-5101204" [ 912.243557] env[63538]: _type = "Task" [ 912.243557] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.255738] env[63538]: DEBUG nova.compute.manager [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 912.258285] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101204, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.281405] env[63538]: DEBUG oslo_vmware.api [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101200, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.291717] env[63538]: DEBUG nova.virt.hardware [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 912.292018] env[63538]: DEBUG nova.virt.hardware [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 912.292178] env[63538]: DEBUG nova.virt.hardware [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 912.292377] env[63538]: DEBUG nova.virt.hardware [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 912.292532] env[63538]: DEBUG nova.virt.hardware [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 912.292688] env[63538]: DEBUG nova.virt.hardware [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 912.293053] env[63538]: DEBUG nova.virt.hardware [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 912.293269] env[63538]: DEBUG nova.virt.hardware [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 912.293458] env[63538]: DEBUG nova.virt.hardware [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 912.293633] env[63538]: DEBUG nova.virt.hardware [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 912.293945] env[63538]: DEBUG nova.virt.hardware [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 912.294965] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6c85fe-ff89-437e-8b38-cce64a5a7c5c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.306998] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e3890d-5821-46dd-853f-f9f42c7db183 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.319707] env[63538]: DEBUG nova.compute.manager [req-f128e511-aeb5-4bdd-ac4a-91b36116b794 req-cd84aa3a-aaf0-4774-b1bc-74b8e023e829 service nova] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Received event network-changed-3a12b458-0ee6-4994-89a1-0b04d9f01da3 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 912.319974] env[63538]: DEBUG nova.compute.manager [req-f128e511-aeb5-4bdd-ac4a-91b36116b794 req-cd84aa3a-aaf0-4774-b1bc-74b8e023e829 service nova] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Refreshing instance network info cache due to event network-changed-3a12b458-0ee6-4994-89a1-0b04d9f01da3. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 912.320299] env[63538]: DEBUG oslo_concurrency.lockutils [req-f128e511-aeb5-4bdd-ac4a-91b36116b794 req-cd84aa3a-aaf0-4774-b1bc-74b8e023e829 service nova] Acquiring lock "refresh_cache-f1838794-710c-4bea-9e73-f6912e1b69f5" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.320526] env[63538]: DEBUG oslo_concurrency.lockutils [req-f128e511-aeb5-4bdd-ac4a-91b36116b794 req-cd84aa3a-aaf0-4774-b1bc-74b8e023e829 service nova] Acquired lock "refresh_cache-f1838794-710c-4bea-9e73-f6912e1b69f5" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.320717] env[63538]: DEBUG nova.network.neutron [req-f128e511-aeb5-4bdd-ac4a-91b36116b794 req-cd84aa3a-aaf0-4774-b1bc-74b8e023e829 service nova] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Refreshing network info cache for port 3a12b458-0ee6-4994-89a1-0b04d9f01da3 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 912.391945] env[63538]: DEBUG nova.compute.manager [req-a07dd3d9-69b0-4fa6-a1a8-336c12361ef2 req-50c00ca6-c29d-4051-b685-c63afd966287 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Received event network-changed-8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 912.392215] env[63538]: DEBUG nova.compute.manager [req-a07dd3d9-69b0-4fa6-a1a8-336c12361ef2 req-50c00ca6-c29d-4051-b685-c63afd966287 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Refreshing instance network info cache due to event network-changed-8a332a90-393f-41ae-a924-4959c06e6207. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 912.392529] env[63538]: DEBUG oslo_concurrency.lockutils [req-a07dd3d9-69b0-4fa6-a1a8-336c12361ef2 req-50c00ca6-c29d-4051-b685-c63afd966287 service nova] Acquiring lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.393185] env[63538]: DEBUG oslo_concurrency.lockutils [req-a07dd3d9-69b0-4fa6-a1a8-336c12361ef2 req-50c00ca6-c29d-4051-b685-c63afd966287 service nova] Acquired lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.393185] env[63538]: DEBUG nova.network.neutron [req-a07dd3d9-69b0-4fa6-a1a8-336c12361ef2 req-50c00ca6-c29d-4051-b685-c63afd966287 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Refreshing network info cache for port 8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 912.403533] env[63538]: DEBUG nova.scheduler.client.report [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 912.534087] env[63538]: INFO nova.compute.manager [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Took 45.16 seconds to build instance. [ 912.680962] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "refresh_cache-d967631f-5c8a-42d8-ac05-4cec3bdb55cf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.738136] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101203, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.755353] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101204, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.782148] env[63538]: DEBUG oslo_vmware.api [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101200, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.548919} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.782614] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 912.782945] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 912.783252] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 912.783562] env[63538]: INFO nova.compute.manager [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Took 1.63 seconds to destroy the instance on the hypervisor. [ 912.783943] env[63538]: DEBUG oslo.service.loopingcall [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.784223] env[63538]: DEBUG nova.compute.manager [-] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 912.784318] env[63538]: DEBUG nova.network.neutron [-] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 912.911123] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.681s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.911772] env[63538]: DEBUG nova.compute.manager [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 912.914713] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 34.424s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.914972] env[63538]: DEBUG nova.objects.instance [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63538) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 913.036321] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4550aa20-7bf3-4f9e-a9fa-cff244859969 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.672s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.060503] env[63538]: DEBUG nova.network.neutron [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Successfully updated port: 312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 913.186801] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2d192a9-af8c-4bb1-ad3b-b78eecefbc43 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-d967631f-5c8a-42d8-ac05-4cec3bdb55cf-4c96df2c-ae17-49fe-84c1-d86dd4b46eb6" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 12.116s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.236845] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101203, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.256710] env[63538]: DEBUG oslo_vmware.api [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101204, 'name': PowerOnVM_Task, 'duration_secs': 0.556465} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.257012] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 913.422990] env[63538]: DEBUG nova.compute.utils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 913.433054] env[63538]: DEBUG nova.compute.manager [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 913.433054] env[63538]: DEBUG nova.network.neutron [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 913.549706] env[63538]: DEBUG nova.policy [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5bf72236fc049b7a22b8a2e53e4d7cd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6906dcd3e0074931bdbe4233fbc2bf95', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 913.553088] env[63538]: DEBUG nova.network.neutron [req-f128e511-aeb5-4bdd-ac4a-91b36116b794 req-cd84aa3a-aaf0-4774-b1bc-74b8e023e829 service nova] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Updated VIF entry in instance network info cache for port 3a12b458-0ee6-4994-89a1-0b04d9f01da3. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 913.553591] env[63538]: DEBUG nova.network.neutron [req-f128e511-aeb5-4bdd-ac4a-91b36116b794 req-cd84aa3a-aaf0-4774-b1bc-74b8e023e829 service nova] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Updating instance_info_cache with network_info: [{"id": "3a12b458-0ee6-4994-89a1-0b04d9f01da3", "address": "fa:16:3e:52:e3:c2", "network": {"id": "676fedce-e24d-48c7-8075-6d3a8ece6c03", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1864286401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1fac1e0f624a0382b8b73720fb4c7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a12b458-0e", "ovs_interfaceid": "3a12b458-0ee6-4994-89a1-0b04d9f01da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.564320] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquiring lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.564529] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquired lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.564666] env[63538]: DEBUG nova.network.neutron [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 913.635974] env[63538]: DEBUG nova.network.neutron [req-a07dd3d9-69b0-4fa6-a1a8-336c12361ef2 req-50c00ca6-c29d-4051-b685-c63afd966287 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Updated VIF entry in instance network info cache for port 8a332a90-393f-41ae-a924-4959c06e6207. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 913.637743] env[63538]: DEBUG nova.network.neutron [req-a07dd3d9-69b0-4fa6-a1a8-336c12361ef2 req-50c00ca6-c29d-4051-b685-c63afd966287 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Updating instance_info_cache with network_info: [{"id": "8a332a90-393f-41ae-a924-4959c06e6207", "address": "fa:16:3e:37:0e:9d", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a332a90-39", "ovs_interfaceid": "8a332a90-393f-41ae-a924-4959c06e6207", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.739181] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101203, 'name': CreateVM_Task, 'duration_secs': 1.397879} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.739407] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 913.740568] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.740568] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.740902] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 913.741413] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ccd1c56-c28b-4ba9-9aa6-a6d03abf9615 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.748163] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Waiting for the task: (returnval){ [ 913.748163] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52487eb3-176b-eec1-12a4-301c9f67964d" [ 913.748163] env[63538]: _type = "Task" [ 913.748163] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.769955] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52487eb3-176b-eec1-12a4-301c9f67964d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.931156] env[63538]: DEBUG nova.compute.manager [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 913.935383] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9be402a5-fa43-4c12-9f89-4ad67ea83a17 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.937573] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.328s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.938902] env[63538]: DEBUG nova.objects.instance [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lazy-loading 'resources' on Instance uuid 9c1f7da8-59f6-45bc-8d5f-23c8ec760829 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 913.964059] env[63538]: DEBUG nova.network.neutron [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Successfully created port: d5a8d000-6995-4d04-b7ff-431a2456a13e {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 914.057017] env[63538]: DEBUG oslo_concurrency.lockutils [req-f128e511-aeb5-4bdd-ac4a-91b36116b794 req-cd84aa3a-aaf0-4774-b1bc-74b8e023e829 service nova] Releasing lock "refresh_cache-f1838794-710c-4bea-9e73-f6912e1b69f5" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.106341] env[63538]: DEBUG nova.network.neutron [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 914.141033] env[63538]: DEBUG oslo_concurrency.lockutils [req-a07dd3d9-69b0-4fa6-a1a8-336c12361ef2 req-50c00ca6-c29d-4051-b685-c63afd966287 service nova] Releasing lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.263574] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52487eb3-176b-eec1-12a4-301c9f67964d, 'name': SearchDatastore_Task, 'duration_secs': 0.020588} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.263908] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.265051] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 914.266025] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.266025] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.266025] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 914.266193] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1fb3f099-6c5f-4f67-ac50-3d21ac12ff73 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.272282] env[63538]: INFO nova.compute.manager [None req-0b59a3cd-8ee8-4508-b2b4-e128c8b36f98 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance to original state: 'active' [ 914.279357] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 914.279357] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 914.280938] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4b00198-d194-41d1-90d6-0a11b7859c3e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.289497] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Waiting for the task: (returnval){ [ 914.289497] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5293aef2-2051-d342-3cdb-8a75a36d4fd1" [ 914.289497] env[63538]: _type = "Task" [ 914.289497] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.301453] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5293aef2-2051-d342-3cdb-8a75a36d4fd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.318413] env[63538]: DEBUG nova.network.neutron [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Updating instance_info_cache with network_info: [{"id": "312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "address": "fa:16:3e:0b:f9:6b", "network": {"id": "268f8bc3-cc14-4aad-aa44-87cd6e506681", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-818769698-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b78ab12ed0254cf4b3ccb7c231ca810d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap312c7ab1-93", "ovs_interfaceid": "312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.321818] env[63538]: DEBUG nova.network.neutron [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Successfully created port: 077e00c6-2e89-440c-8653-8742862e8000 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 914.405501] env[63538]: DEBUG nova.compute.manager [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Received event 
network-vif-plugged-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 914.405501] env[63538]: DEBUG oslo_concurrency.lockutils [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] Acquiring lock "e447c109-4cef-4cc7-9acf-61abc0f47482-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.405501] env[63538]: DEBUG oslo_concurrency.lockutils [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] Lock "e447c109-4cef-4cc7-9acf-61abc0f47482-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.405501] env[63538]: DEBUG oslo_concurrency.lockutils [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] Lock "e447c109-4cef-4cc7-9acf-61abc0f47482-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.405501] env[63538]: DEBUG nova.compute.manager [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] No waiting events found dispatching network-vif-plugged-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 914.405501] env[63538]: WARNING nova.compute.manager [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Received unexpected event network-vif-plugged-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b for instance with vm_state building and task_state spawning. [ 914.406302] env[63538]: DEBUG nova.compute.manager [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Received event network-changed-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 914.406627] env[63538]: DEBUG nova.compute.manager [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Refreshing instance network info cache due to event network-changed-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 914.406969] env[63538]: DEBUG oslo_concurrency.lockutils [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] Acquiring lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.411388] env[63538]: DEBUG nova.network.neutron [-] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.433439] env[63538]: DEBUG nova.compute.manager [req-5f037345-25c9-440d-b9b1-32e116edf67a req-2d13b350-3b4f-47c7-b77a-355edfb4d5b3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Received event network-changed-cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 914.434029] env[63538]: DEBUG nova.compute.manager [req-5f037345-25c9-440d-b9b1-32e116edf67a req-2d13b350-3b4f-47c7-b77a-355edfb4d5b3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Refreshing instance network info cache due to event network-changed-cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 914.434029] env[63538]: DEBUG oslo_concurrency.lockutils [req-5f037345-25c9-440d-b9b1-32e116edf67a req-2d13b350-3b4f-47c7-b77a-355edfb4d5b3 service nova] Acquiring lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.434349] env[63538]: DEBUG oslo_concurrency.lockutils [req-5f037345-25c9-440d-b9b1-32e116edf67a req-2d13b350-3b4f-47c7-b77a-355edfb4d5b3 service nova] Acquired lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.435307] env[63538]: DEBUG nova.network.neutron [req-5f037345-25c9-440d-b9b1-32e116edf67a req-2d13b350-3b4f-47c7-b77a-355edfb4d5b3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Refreshing network info cache for port cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 914.801377] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5293aef2-2051-d342-3cdb-8a75a36d4fd1, 'name': SearchDatastore_Task, 'duration_secs': 0.022086} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.805305] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dee6534e-ab13-41de-9c7b-2c5ffbe182ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.812826] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Waiting for the task: (returnval){ [ 914.812826] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d522c8-22cd-1063-7ac0-307ad4dde36d" [ 914.812826] env[63538]: _type = "Task" [ 914.812826] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.824900] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Releasing lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.824900] env[63538]: DEBUG nova.compute.manager [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Instance network_info: |[{"id": "312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "address": "fa:16:3e:0b:f9:6b", "network": {"id": "268f8bc3-cc14-4aad-aa44-87cd6e506681", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-818769698-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b78ab12ed0254cf4b3ccb7c231ca810d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap312c7ab1-93", "ovs_interfaceid": "312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 914.825346] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d522c8-22cd-1063-7ac0-307ad4dde36d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.828866] env[63538]: DEBUG oslo_concurrency.lockutils [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] Acquired lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.828866] env[63538]: DEBUG nova.network.neutron [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Refreshing network info cache for port 312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 914.830527] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:f9:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 914.838676] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Creating folder: Project (b78ab12ed0254cf4b3ccb7c231ca810d). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 914.842920] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a33e309c-c16f-48eb-ba44-4b81c2326a7f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.856789] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Created folder: Project (b78ab12ed0254cf4b3ccb7c231ca810d) in parent group-v992234. [ 914.857025] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Creating folder: Instances. Parent ref: group-v992437. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 914.859857] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d230dd4a-5127-4dd0-9a79-36637c77eb4d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.871915] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Created folder: Instances in parent group-v992437. 
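The entries above repeat two patterns: an oslo.concurrency lock taken on the cached image path ("Acquiring lock ... devstack-image-cache_base/...") before the datastore is browsed, and a task handle polled until the "progress is 0%" lines give way to "completed successfully". The snippet below is a minimal, hypothetical sketch of that shape, not Nova's actual vmwareapi code: the simulated task and the lock name format are illustrative assumptions, and only `lockutils.lock` from oslo.concurrency is a real API.

```python
# Hypothetical sketch of the lock-then-poll pattern seen in the log above.
# The task is simulated; the real code lives in nova.virt.vmwareapi and
# oslo_vmware.api, which this does not reproduce.
import itertools
import time

from oslo_concurrency import lockutils


def fake_search_datastore_task():
    """Simulated task handle: reports increasing progress, then success."""
    progress = itertools.chain([0, 40, 80], itertools.repeat(100))
    return lambda: next(progress)


def wait_for_task(poll, interval=0.5, timeout=30.0):
    """Poll a progress callable until it reaches 100%, mirroring the
    'progress is 0%' / 'completed successfully' pairs in the log."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        progress = poll()
        print("Task progress is %d%%" % progress)
        if progress >= 100:
            return True
        time.sleep(interval)
    raise TimeoutError("task did not complete in %.1fs" % timeout)


def search_image_cache(datastore, image_id):
    # Guard the shared image-cache path with a named lock so concurrent
    # builds of the same image do not race each other (assumed lock name).
    lock_name = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
    with lockutils.lock(lock_name):
        task = fake_search_datastore_task()
        return wait_for_task(task)


if __name__ == "__main__":
    search_image_cache("datastore2", "faabbca4-e27b-433a-b93d-f059fd73bc92")
```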
[ 914.872895] env[63538]: DEBUG oslo.service.loopingcall [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.872895] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 914.872895] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6f4e7f3-e323-4b40-80bb-2575fbff93d5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.898961] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 914.898961] env[63538]: value = "task-5101207" [ 914.898961] env[63538]: _type = "Task" [ 914.898961] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.911035] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101207, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.915178] env[63538]: INFO nova.compute.manager [-] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Took 2.13 seconds to deallocate network for instance. [ 914.951867] env[63538]: DEBUG nova.compute.manager [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 915.001256] env[63538]: DEBUG nova.virt.hardware [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 915.001548] env[63538]: DEBUG nova.virt.hardware [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 915.001715] env[63538]: DEBUG nova.virt.hardware [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.001971] env[63538]: DEBUG nova.virt.hardware [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 915.002072] env[63538]: DEBUG nova.virt.hardware [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.002241] env[63538]: DEBUG nova.virt.hardware [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 915.002460] env[63538]: DEBUG nova.virt.hardware [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 915.002627] env[63538]: DEBUG nova.virt.hardware [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 915.002804] env[63538]: DEBUG nova.virt.hardware [None req-5648f11e-81d8-46f1-af42-c7414576bc53 
tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 915.002975] env[63538]: DEBUG nova.virt.hardware [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 915.003259] env[63538]: DEBUG nova.virt.hardware [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 915.004136] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9e71be-f056-4c29-9405-29c2bc0a3622 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.015663] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b343fd5-1d78-4d30-8da5-3026178faef6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.020312] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fdc507-2de3-484f-b2b5-1455b63bba78 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.040979] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72db3166-2d58-42f6-a545-579e7c43bc21 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.079647] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63afb218-fa6a-438d-90ae-156a867b331f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.093150] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56236434-fddd-4923-864e-92f3b3870967 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.116735] env[63538]: DEBUG nova.compute.provider_tree [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 915.268894] env[63538]: DEBUG nova.network.neutron [req-5f037345-25c9-440d-b9b1-32e116edf67a req-2d13b350-3b4f-47c7-b77a-355edfb4d5b3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Updated VIF entry in instance network info cache for port 
cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 915.269352] env[63538]: DEBUG nova.network.neutron [req-5f037345-25c9-440d-b9b1-32e116edf67a req-2d13b350-3b4f-47c7-b77a-355edfb4d5b3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Updating instance_info_cache with network_info: [{"id": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "address": "fa:16:3e:2d:31:f0", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff0fa7b-e0", "ovs_interfaceid": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.327012] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d522c8-22cd-1063-7ac0-307ad4dde36d, 'name': SearchDatastore_Task, 'duration_secs': 0.02183} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.327396] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.327729] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] f1838794-710c-4bea-9e73-f6912e1b69f5/f1838794-710c-4bea-9e73-f6912e1b69f5.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 915.328063] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-862ce53e-f979-450f-8501-67212ea4b683 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.338710] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Waiting for the task: (returnval){ [ 915.338710] env[63538]: value = "task-5101208" [ 915.338710] env[63538]: _type = "Task" [ 915.338710] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.349446] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101208, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.413341] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101207, 'name': CreateVM_Task, 'duration_secs': 0.488192} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.413537] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 915.414350] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.414539] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.415057] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 915.415416] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8aa8d9e-5217-49fc-ba81-3408dd55b7c0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.422520] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.423000] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 915.423000] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525ce1ae-3b56-3449-b6d5-65c4108ccae7" [ 915.423000] env[63538]: _type = "Task" [ 915.423000] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.433443] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525ce1ae-3b56-3449-b6d5-65c4108ccae7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.580913] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cd8c0c-9dd6-d3b4-f85d-a19b54df3c66/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 915.582159] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b83f0b9-f2cc-477d-9889-112815f81977 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.589957] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cd8c0c-9dd6-d3b4-f85d-a19b54df3c66/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 915.590227] env[63538]: ERROR oslo_vmware.rw_handles [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cd8c0c-9dd6-d3b4-f85d-a19b54df3c66/disk-0.vmdk due to incomplete transfer. [ 915.590527] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4691166e-62e8-4fd4-8349-e8c3b3334d39 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.599436] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cd8c0c-9dd6-d3b4-f85d-a19b54df3c66/disk-0.vmdk. {{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 915.599692] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Uploaded image 375e564f-7253-420e-99be-0bda90d5793d to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 915.602270] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 915.603272] env[63538]: DEBUG nova.network.neutron [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Updated VIF entry in instance network info cache for port 312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 915.603666] env[63538]: DEBUG nova.network.neutron [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Updating instance_info_cache with network_info: [{"id": "312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "address": "fa:16:3e:0b:f9:6b", "network": {"id": "268f8bc3-cc14-4aad-aa44-87cd6e506681", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-818769698-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b78ab12ed0254cf4b3ccb7c231ca810d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap312c7ab1-93", "ovs_interfaceid": "312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.605212] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-407bcd7b-8918-44b7-8041-11d09dbea6ba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.614384] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 915.614384] env[63538]: value = "task-5101209" [ 915.614384] env[63538]: _type = "Task" [ 915.614384] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.627248] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101209, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.644331] env[63538]: ERROR nova.scheduler.client.report [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [req-0b070f2b-e657-4224-b833-25a029e2754d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0b070f2b-e657-4224-b833-25a029e2754d"}]} [ 915.662322] env[63538]: DEBUG nova.scheduler.client.report [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 915.678052] env[63538]: DEBUG nova.scheduler.client.report [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 915.678449] env[63538]: DEBUG nova.compute.provider_tree [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 915.690882] env[63538]: DEBUG nova.scheduler.client.report [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 915.713051] env[63538]: DEBUG nova.scheduler.client.report [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 915.773995] env[63538]: DEBUG oslo_concurrency.lockutils [req-5f037345-25c9-440d-b9b1-32e116edf67a req-2d13b350-3b4f-47c7-b77a-355edfb4d5b3 service nova] Releasing lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.774550] env[63538]: DEBUG nova.compute.manager [req-5f037345-25c9-440d-b9b1-32e116edf67a 
req-2d13b350-3b4f-47c7-b77a-355edfb4d5b3 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Received event network-vif-deleted-696fc25d-fa83-4793-bffa-6bd2ce56f489 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 915.774871] env[63538]: INFO nova.compute.manager [req-5f037345-25c9-440d-b9b1-32e116edf67a req-2d13b350-3b4f-47c7-b77a-355edfb4d5b3 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Neutron deleted interface 696fc25d-fa83-4793-bffa-6bd2ce56f489; detaching it from the instance and deleting it from the info cache [ 915.775174] env[63538]: DEBUG nova.network.neutron [req-5f037345-25c9-440d-b9b1-32e116edf67a req-2d13b350-3b4f-47c7-b77a-355edfb4d5b3 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.850653] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101208, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.944456] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525ce1ae-3b56-3449-b6d5-65c4108ccae7, 'name': SearchDatastore_Task, 'duration_secs': 0.014724} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.944890] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.945166] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 915.945494] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.945695] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.946228] 
env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 915.946352] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f8c0b26-fa5a-4f37-96ca-1ffaa666bae4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.961601] env[63538]: DEBUG nova.compute.manager [req-1a3212d2-3b3d-4abc-ba41-b65156e3e563 req-20fb85b1-d9d8-46de-bc01-ed4da32c90e7 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Received event network-vif-plugged-d5a8d000-6995-4d04-b7ff-431a2456a13e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 915.962086] env[63538]: DEBUG oslo_concurrency.lockutils [req-1a3212d2-3b3d-4abc-ba41-b65156e3e563 req-20fb85b1-d9d8-46de-bc01-ed4da32c90e7 service nova] Acquiring lock "b47925eb-3d97-415b-9410-2e325da5ce79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.962543] env[63538]: DEBUG oslo_concurrency.lockutils [req-1a3212d2-3b3d-4abc-ba41-b65156e3e563 req-20fb85b1-d9d8-46de-bc01-ed4da32c90e7 service nova] Lock "b47925eb-3d97-415b-9410-2e325da5ce79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.962843] env[63538]: DEBUG oslo_concurrency.lockutils [req-1a3212d2-3b3d-4abc-ba41-b65156e3e563 req-20fb85b1-d9d8-46de-bc01-ed4da32c90e7 service nova] Lock "b47925eb-3d97-415b-9410-2e325da5ce79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.963190] env[63538]: DEBUG nova.compute.manager [req-1a3212d2-3b3d-4abc-ba41-b65156e3e563 req-20fb85b1-d9d8-46de-bc01-ed4da32c90e7 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] No waiting events found dispatching network-vif-plugged-d5a8d000-6995-4d04-b7ff-431a2456a13e {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 915.963463] env[63538]: WARNING nova.compute.manager [req-1a3212d2-3b3d-4abc-ba41-b65156e3e563 req-20fb85b1-d9d8-46de-bc01-ed4da32c90e7 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Received unexpected event network-vif-plugged-d5a8d000-6995-4d04-b7ff-431a2456a13e for instance with vm_state building and task_state spawning. [ 915.971905] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 915.971905] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 915.972912] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60e7f3c1-560e-4f5a-8897-9a6cfe1e355d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.986250] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 915.986250] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521aca11-fbf5-0400-7343-503144e1c287" [ 915.986250] env[63538]: _type = "Task" [ 915.986250] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.000971] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521aca11-fbf5-0400-7343-503144e1c287, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.111396] env[63538]: DEBUG oslo_concurrency.lockutils [req-47f582d7-28c8-4f39-9b21-8d2ac7016dd8 req-a2ebabf7-654b-4ca5-b4dd-12cd8c3e7f1f service nova] Releasing lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.125725] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101209, 'name': Destroy_Task} progress is 33%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.214581] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e668f200-7a6a-4936-ab47-962e52a2b5a4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.224703] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e40bef-7a29-4dc3-835e-fa16e5427866 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.261974] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7581601a-9b76-48cd-af52-3480396c9fa8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.270718] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d73f3b-1f2f-4e9f-97a9-ae3525c46738 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.277820] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57482599-0a74-4d52-9d20-44630dee54b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.291409] env[63538]: DEBUG nova.compute.provider_tree [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.302935] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a86d5bb-95f4-4889-8621-6ee0f7733b5d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.343221] env[63538]: DEBUG nova.compute.manager [req-5f037345-25c9-440d-b9b1-32e116edf67a req-2d13b350-3b4f-47c7-b77a-355edfb4d5b3 service nova] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Detach interface failed, port_id=696fc25d-fa83-4793-bffa-6bd2ce56f489, reason: Instance d967631f-5c8a-42d8-ac05-4cec3bdb55cf could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 916.353110] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101208, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.926704} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.353416] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] f1838794-710c-4bea-9e73-f6912e1b69f5/f1838794-710c-4bea-9e73-f6912e1b69f5.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 916.353644] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 916.353981] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e02f860d-c332-473f-a282-887558d98169 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.363755] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Waiting for the task: (returnval){ [ 916.363755] env[63538]: value = "task-5101210" [ 916.363755] env[63538]: _type = "Task" [ 916.363755] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.373721] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101210, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.498887] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521aca11-fbf5-0400-7343-503144e1c287, 'name': SearchDatastore_Task, 'duration_secs': 0.086203} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.499872] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8dc90c6-ee7e-4f7e-b5b5-5c41032ef65e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.504393] env[63538]: DEBUG nova.network.neutron [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Successfully updated port: d5a8d000-6995-4d04-b7ff-431a2456a13e {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 916.506917] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 916.506917] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52382481-f6a4-8553-7f94-726d48afac1f" [ 916.506917] env[63538]: _type = "Task" [ 916.506917] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.518096] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52382481-f6a4-8553-7f94-726d48afac1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.530224] env[63538]: DEBUG nova.compute.manager [req-7af055b9-0895-497c-a53e-f813436147db req-4c8ea05b-0a50-4e78-a9ea-61db07bfe3c7 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Received event network-changed-d5a8d000-6995-4d04-b7ff-431a2456a13e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 916.530447] env[63538]: DEBUG nova.compute.manager [req-7af055b9-0895-497c-a53e-f813436147db req-4c8ea05b-0a50-4e78-a9ea-61db07bfe3c7 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Refreshing instance network info cache due to event network-changed-d5a8d000-6995-4d04-b7ff-431a2456a13e. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 916.530669] env[63538]: DEBUG oslo_concurrency.lockutils [req-7af055b9-0895-497c-a53e-f813436147db req-4c8ea05b-0a50-4e78-a9ea-61db07bfe3c7 service nova] Acquiring lock "refresh_cache-b47925eb-3d97-415b-9410-2e325da5ce79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.530820] env[63538]: DEBUG oslo_concurrency.lockutils [req-7af055b9-0895-497c-a53e-f813436147db req-4c8ea05b-0a50-4e78-a9ea-61db07bfe3c7 service nova] Acquired lock "refresh_cache-b47925eb-3d97-415b-9410-2e325da5ce79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.530986] env[63538]: DEBUG nova.network.neutron [req-7af055b9-0895-497c-a53e-f813436147db req-4c8ea05b-0a50-4e78-a9ea-61db07bfe3c7 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Refreshing network info cache for port d5a8d000-6995-4d04-b7ff-431a2456a13e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 916.627602] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101209, 'name': Destroy_Task, 'duration_secs': 0.854564} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.627884] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Destroyed the VM [ 916.628163] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 916.628426] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-30707cf9-d04d-4e83-b6e3-06d22b85ace4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.636019] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 916.636019] env[63538]: value = "task-5101211" [ 916.636019] env[63538]: _type = "Task" [ 916.636019] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.645881] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101211, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.794438] env[63538]: DEBUG nova.scheduler.client.report [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 916.875907] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140544} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.876191] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 916.877023] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e9abb6-a1cd-4f10-a6e0-7dcc02257b83 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.880319] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "a2e036ae-318b-44ea-9db0-10fa3838728b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.880319] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "a2e036ae-318b-44ea-9db0-10fa3838728b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.880547] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "a2e036ae-318b-44ea-9db0-10fa3838728b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.880730] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "a2e036ae-318b-44ea-9db0-10fa3838728b-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.880899] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "a2e036ae-318b-44ea-9db0-10fa3838728b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.884113] env[63538]: INFO nova.compute.manager [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Terminating instance [ 916.886272] env[63538]: DEBUG nova.compute.manager [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 916.886486] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 916.887481] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4eafa3-25d5-4d16-b372-f3fccddcf936 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.909576] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] f1838794-710c-4bea-9e73-f6912e1b69f5/f1838794-710c-4bea-9e73-f6912e1b69f5.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.910522] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7888436-a541-48f9-9abc-a7de12e6fd04 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.932521] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 916.934196] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b11d7930-1b3a-4e55-8907-32333401442f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.942731] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Waiting for the task: (returnval){ [ 
916.942731] env[63538]: value = "task-5101212" [ 916.942731] env[63538]: _type = "Task" [ 916.942731] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.946306] env[63538]: DEBUG oslo_vmware.api [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 916.946306] env[63538]: value = "task-5101213" [ 916.946306] env[63538]: _type = "Task" [ 916.946306] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.960360] env[63538]: DEBUG oslo_vmware.api [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101213, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.965371] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101212, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.023252] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52382481-f6a4-8553-7f94-726d48afac1f, 'name': SearchDatastore_Task, 'duration_secs': 0.011098} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.023737] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.024069] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] e447c109-4cef-4cc7-9acf-61abc0f47482/e447c109-4cef-4cc7-9acf-61abc0f47482.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 917.024418] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d10b3b9-e617-4cb7-92a2-4e1240524d83 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.038386] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 917.038386] env[63538]: value = "task-5101214" [ 917.038386] env[63538]: _type = "Task" [ 917.038386] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.054256] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101214, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.078160] env[63538]: DEBUG nova.network.neutron [req-7af055b9-0895-497c-a53e-f813436147db req-4c8ea05b-0a50-4e78-a9ea-61db07bfe3c7 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 917.152061] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101211, 'name': RemoveSnapshot_Task} progress is 98%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.206499] env[63538]: DEBUG nova.network.neutron [req-7af055b9-0895-497c-a53e-f813436147db req-4c8ea05b-0a50-4e78-a9ea-61db07bfe3c7 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.299876] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.362s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.302624] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.840s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.304775] env[63538]: INFO nova.compute.claims [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 917.338651] env[63538]: INFO nova.scheduler.client.report [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted allocations for instance 9c1f7da8-59f6-45bc-8d5f-23c8ec760829 [ 917.458882] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101212, 'name': ReconfigVM_Task, 'duration_secs': 0.357624} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.459746] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Reconfigured VM instance instance-00000049 to attach disk [datastore2] f1838794-710c-4bea-9e73-f6912e1b69f5/f1838794-710c-4bea-9e73-f6912e1b69f5.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.460469] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab5bae3a-9b70-4eb4-aeb1-e8baccdc6f26 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.467269] env[63538]: DEBUG oslo_vmware.api [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101213, 'name': PowerOffVM_Task, 'duration_secs': 0.322567} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.467573] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 917.467747] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 917.469485] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da152ef3-ac45-466d-9289-d6341b31a4e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.471309] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Waiting for the task: (returnval){ [ 917.471309] env[63538]: value = "task-5101215" [ 917.471309] env[63538]: _type = "Task" [ 917.471309] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.482600] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101215, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.551197] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101214, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.647745] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101211, 'name': RemoveSnapshot_Task} progress is 98%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.711463] env[63538]: DEBUG oslo_concurrency.lockutils [req-7af055b9-0895-497c-a53e-f813436147db req-4c8ea05b-0a50-4e78-a9ea-61db07bfe3c7 service nova] Releasing lock "refresh_cache-b47925eb-3d97-415b-9410-2e325da5ce79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.847891] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6faa13b0-f8e8-42c7-90fd-ec3c0b484cba tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "9c1f7da8-59f6-45bc-8d5f-23c8ec760829" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.200s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.887281] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 917.887640] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 917.888015] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Deleting the datastore file [datastore1] a2e036ae-318b-44ea-9db0-10fa3838728b {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 917.888224] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1de6bd6-d6c5-4907-9ffd-290eb53bdfdb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.896360] env[63538]: DEBUG oslo_vmware.api [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 917.896360] env[63538]: value = "task-5101217" [ 917.896360] env[63538]: _type = "Task" [ 917.896360] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.907922] env[63538]: DEBUG oslo_vmware.api [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101217, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.981826] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101215, 'name': Rename_Task, 'duration_secs': 0.203809} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.982155] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 917.982440] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-851c8583-1cb8-4f18-b93f-a3958db50dc0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.991043] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Waiting for the task: (returnval){ [ 917.991043] env[63538]: value = "task-5101218" [ 917.991043] env[63538]: _type = "Task" [ 917.991043] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.002728] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101218, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.050896] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101214, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649229} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.051371] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] e447c109-4cef-4cc7-9acf-61abc0f47482/e447c109-4cef-4cc7-9acf-61abc0f47482.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 918.051753] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 918.052093] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d95b07b-7702-41b0-b571-857bf26f5b3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.059696] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 918.059696] env[63538]: value = "task-5101219" [ 918.059696] env[63538]: _type = "Task" [ 918.059696] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.068399] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101219, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.147707] env[63538]: DEBUG oslo_vmware.api [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101211, 'name': RemoveSnapshot_Task, 'duration_secs': 1.365201} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.148017] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 918.148283] env[63538]: INFO nova.compute.manager [None req-e4d3cc66-2911-4ed9-a09b-b51d8b75ed47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Took 20.14 seconds to snapshot the instance on the hypervisor. 
[ 918.406918] env[63538]: DEBUG oslo_vmware.api [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.358978} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.409835] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.410080] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 918.410282] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 918.410469] env[63538]: INFO nova.compute.manager [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Took 1.52 seconds to destroy the instance on the hypervisor. [ 918.410735] env[63538]: DEBUG oslo.service.loopingcall [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 918.411201] env[63538]: DEBUG nova.compute.manager [-] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 918.411300] env[63538]: DEBUG nova.network.neutron [-] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 918.504592] env[63538]: DEBUG oslo_vmware.api [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101218, 'name': PowerOnVM_Task, 'duration_secs': 0.487778} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.504882] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 918.505183] env[63538]: INFO nova.compute.manager [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Took 8.98 seconds to spawn the instance on the hypervisor. [ 918.505469] env[63538]: DEBUG nova.compute.manager [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 918.506406] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036a8166-ca88-4407-aca6-1f30c547bbf3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.573727] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101219, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070611} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.574073] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 918.574930] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed662f4e-0a48-463e-ad83-a329fbb9864d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.608454] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] e447c109-4cef-4cc7-9acf-61abc0f47482/e447c109-4cef-4cc7-9acf-61abc0f47482.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.611456] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad6e572d-3669-424d-a8bc-a013e0685561 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.634199] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 918.634199] env[63538]: value = 
"task-5101220" [ 918.634199] env[63538]: _type = "Task" [ 918.634199] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.646292] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101220, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.821014] env[63538]: DEBUG nova.compute.manager [req-fbfa9ab9-475a-4ebd-8e05-82274cc03d86 req-4ece8e66-b886-4d3a-842e-bb6b33c5a39a service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Received event network-vif-plugged-077e00c6-2e89-440c-8653-8742862e8000 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 918.821269] env[63538]: DEBUG oslo_concurrency.lockutils [req-fbfa9ab9-475a-4ebd-8e05-82274cc03d86 req-4ece8e66-b886-4d3a-842e-bb6b33c5a39a service nova] Acquiring lock "b47925eb-3d97-415b-9410-2e325da5ce79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.821618] env[63538]: DEBUG oslo_concurrency.lockutils [req-fbfa9ab9-475a-4ebd-8e05-82274cc03d86 req-4ece8e66-b886-4d3a-842e-bb6b33c5a39a service nova] Lock "b47925eb-3d97-415b-9410-2e325da5ce79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.821719] env[63538]: DEBUG oslo_concurrency.lockutils [req-fbfa9ab9-475a-4ebd-8e05-82274cc03d86 req-4ece8e66-b886-4d3a-842e-bb6b33c5a39a service nova] Lock "b47925eb-3d97-415b-9410-2e325da5ce79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.821948] env[63538]: DEBUG nova.compute.manager [req-fbfa9ab9-475a-4ebd-8e05-82274cc03d86 req-4ece8e66-b886-4d3a-842e-bb6b33c5a39a service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] No waiting events found dispatching network-vif-plugged-077e00c6-2e89-440c-8653-8742862e8000 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 918.822179] env[63538]: WARNING nova.compute.manager [req-fbfa9ab9-475a-4ebd-8e05-82274cc03d86 req-4ece8e66-b886-4d3a-842e-bb6b33c5a39a service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Received unexpected event network-vif-plugged-077e00c6-2e89-440c-8653-8742862e8000 for instance with vm_state building and task_state spawning. 
[ 918.824313] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4672f540-fc39-4cee-921f-ec1fd1e223da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.833438] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cd976e-fc2e-4b42-912f-c3589dc565b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.842237] env[63538]: DEBUG nova.compute.manager [req-c26fca65-7a25-4c61-9aea-df9d291d10f1 req-140d2839-f5c5-46bd-b7b5-5db7164622e1 service nova] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Received event network-vif-deleted-11d7dbc5-d269-456b-9a7a-601759e64b51 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 918.842448] env[63538]: INFO nova.compute.manager [req-c26fca65-7a25-4c61-9aea-df9d291d10f1 req-140d2839-f5c5-46bd-b7b5-5db7164622e1 service nova] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Neutron deleted interface 11d7dbc5-d269-456b-9a7a-601759e64b51; detaching it from the instance and deleting it from the info cache [ 918.842625] env[63538]: DEBUG nova.network.neutron [req-c26fca65-7a25-4c61-9aea-df9d291d10f1 req-140d2839-f5c5-46bd-b7b5-5db7164622e1 service nova] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.870388] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40a4edc-eca8-4411-a13d-a962bb359d1d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.874518] env[63538]: DEBUG nova.network.neutron [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Successfully updated port: 077e00c6-2e89-440c-8653-8742862e8000 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 918.884479] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b68d54d-d6cd-4cc7-8b5b-b5e7f8473048 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.903030] env[63538]: DEBUG nova.compute.provider_tree [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.030581] env[63538]: INFO nova.compute.manager [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Took 51.54 seconds to build instance. [ 919.144350] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101220, 'name': ReconfigVM_Task, 'duration_secs': 0.351657} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.144673] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Reconfigured VM instance instance-0000004a to attach disk [datastore2] e447c109-4cef-4cc7-9acf-61abc0f47482/e447c109-4cef-4cc7-9acf-61abc0f47482.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.145380] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5dac77e-ffe6-45e8-b7b3-be7876247bcb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.152585] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 919.152585] env[63538]: value = "task-5101221" [ 919.152585] env[63538]: _type = "Task" [ 919.152585] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.162273] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101221, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.256965] env[63538]: DEBUG nova.network.neutron [-] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.370799] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fe4ca6c-49d1-4b1a-93f6-e1dd5acc8820 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.376847] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "refresh_cache-b47925eb-3d97-415b-9410-2e325da5ce79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.377012] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquired lock "refresh_cache-b47925eb-3d97-415b-9410-2e325da5ce79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.377181] env[63538]: DEBUG nova.network.neutron [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 919.382880] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818ad1d2-37f9-48ca-bcd7-6f07b4f847a0 {{(pid=63538) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.406372] env[63538]: DEBUG nova.scheduler.client.report [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 919.426155] env[63538]: DEBUG nova.compute.manager [req-c26fca65-7a25-4c61-9aea-df9d291d10f1 req-140d2839-f5c5-46bd-b7b5-5db7164622e1 service nova] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Detach interface failed, port_id=11d7dbc5-d269-456b-9a7a-601759e64b51, reason: Instance a2e036ae-318b-44ea-9db0-10fa3838728b could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 919.533062] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0540092e-ef81-496b-a6db-4f1761f2ae71 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lock "f1838794-710c-4bea-9e73-f6912e1b69f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.446s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.667055] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101221, 'name': Rename_Task, 'duration_secs': 0.141499} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.667055] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 919.667275] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-687b5788-ccef-4fa0-a0f4-dcd094415a76 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.670715] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c092b93d-c6b6-4106-870b-dd855bc90e6a tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Acquiring lock "interface-f1838794-710c-4bea-9e73-f6912e1b69f5-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.670959] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c092b93d-c6b6-4106-870b-dd855bc90e6a tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lock "interface-f1838794-710c-4bea-9e73-f6912e1b69f5-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.671298] env[63538]: DEBUG nova.objects.instance [None req-c092b93d-c6b6-4106-870b-dd855bc90e6a tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lazy-loading 'flavor' on Instance uuid f1838794-710c-4bea-9e73-f6912e1b69f5 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.675025] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 919.675025] env[63538]: value = "task-5101222" [ 919.675025] env[63538]: _type = "Task" [ 919.675025] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.686206] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101222, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.761227] env[63538]: INFO nova.compute.manager [-] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Took 1.35 seconds to deallocate network for instance. 
[ 919.911361] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.609s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.911906] env[63538]: DEBUG nova.compute.manager [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 919.916067] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.418s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.917659] env[63538]: INFO nova.compute.claims [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 919.922384] env[63538]: DEBUG nova.network.neutron [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 920.175696] env[63538]: DEBUG nova.objects.instance [None req-c092b93d-c6b6-4106-870b-dd855bc90e6a tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lazy-loading 'pci_requests' on Instance uuid f1838794-710c-4bea-9e73-f6912e1b69f5 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 920.188769] env[63538]: DEBUG oslo_vmware.api [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101222, 'name': PowerOnVM_Task, 'duration_secs': 0.500091} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.188769] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 920.188769] env[63538]: INFO nova.compute.manager [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Took 7.93 seconds to spawn the instance on the hypervisor. 
[ 920.188769] env[63538]: DEBUG nova.compute.manager [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 920.189775] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34251930-756e-42f0-9ba8-4b5e25405818 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.273795] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.275090] env[63538]: DEBUG nova.compute.manager [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 920.276178] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f76f1e-82ee-4d5e-b5ca-5ecf505ea380 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.339447] env[63538]: DEBUG nova.network.neutron [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Updating instance_info_cache with network_info: [{"id": "d5a8d000-6995-4d04-b7ff-431a2456a13e", "address": "fa:16:3e:f8:6e:97", "network": {"id": "fbcfee83-91c7-4538-bbd4-d5c6159d6aa1", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1159869692", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5a8d000-69", "ovs_interfaceid": "d5a8d000-6995-4d04-b7ff-431a2456a13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "077e00c6-2e89-440c-8653-8742862e8000", "address": "fa:16:3e:8d:5e:fa", "network": {"id": "6f2c74bd-42d7-4ea5-a715-45fb77e37604", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-155494244", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.132", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap077e00c6-2e", "ovs_interfaceid": "077e00c6-2e89-440c-8653-8742862e8000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.417728] env[63538]: DEBUG nova.compute.utils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 920.419906] env[63538]: DEBUG nova.compute.manager [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 920.420303] env[63538]: DEBUG nova.network.neutron [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 920.481173] env[63538]: DEBUG nova.policy [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '884364daa8f746fb9b32a8b33c9e2cfd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea05f3fb4676466bb2a286f5a2fefb8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 920.681589] env[63538]: DEBUG nova.objects.base [None req-c092b93d-c6b6-4106-870b-dd855bc90e6a tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 920.681821] env[63538]: DEBUG nova.network.neutron [None req-c092b93d-c6b6-4106-870b-dd855bc90e6a tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 920.713588] env[63538]: INFO nova.compute.manager [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Took 45.93 seconds to build instance. 
[ 920.772358] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c092b93d-c6b6-4106-870b-dd855bc90e6a tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lock "interface-f1838794-710c-4bea-9e73-f6912e1b69f5-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.101s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.787351] env[63538]: DEBUG nova.network.neutron [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Successfully created port: 733479ad-a05b-44cf-a265-b766fea3b2ba {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 920.790868] env[63538]: INFO nova.compute.manager [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] instance snapshotting [ 920.795127] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89689ad-be31-4b00-8390-bfb4b8ca436e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.821375] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d6d0eb-363d-42a5-9435-2ca911540126 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.842071] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Releasing lock "refresh_cache-b47925eb-3d97-415b-9410-2e325da5ce79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.842432] env[63538]: DEBUG nova.compute.manager [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Instance network_info: |[{"id": "d5a8d000-6995-4d04-b7ff-431a2456a13e", "address": "fa:16:3e:f8:6e:97", "network": {"id": "fbcfee83-91c7-4538-bbd4-d5c6159d6aa1", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1159869692", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5a8d000-69", "ovs_interfaceid": "d5a8d000-6995-4d04-b7ff-431a2456a13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "077e00c6-2e89-440c-8653-8742862e8000", "address": 
"fa:16:3e:8d:5e:fa", "network": {"id": "6f2c74bd-42d7-4ea5-a715-45fb77e37604", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-155494244", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.132", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap077e00c6-2e", "ovs_interfaceid": "077e00c6-2e89-440c-8653-8742862e8000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 920.842933] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:6e:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99639c37-b0c6-4be7-9594-230e44b1280b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd5a8d000-6995-4d04-b7ff-431a2456a13e', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:5e:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd298db54-f13d-4bf6-b6c2-755074b3047f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '077e00c6-2e89-440c-8653-8742862e8000', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 920.856596] env[63538]: DEBUG oslo.service.loopingcall [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 920.856596] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 920.856596] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f06659a-e3b0-47a0-9c1d-8fca7f4e71fc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.881661] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 920.881661] env[63538]: value = "task-5101223" [ 920.881661] env[63538]: _type = "Task" [ 920.881661] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.890625] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101223, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.924539] env[63538]: DEBUG nova.compute.manager [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 921.216125] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0a4578c6-ffd3-408f-adb2-b410f503abf9 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Lock "e447c109-4cef-4cc7-9acf-61abc0f47482" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.458s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.333886] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 921.334657] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0a60f064-8abc-45e0-8cff-c805191ef517 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.339993] env[63538]: DEBUG nova.compute.manager [req-db58d86e-344f-444b-ae9d-e3bed229d77e req-2595314b-1d91-4faf-a394-d643f115a62c service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Received event network-changed-077e00c6-2e89-440c-8653-8742862e8000 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 921.340292] env[63538]: DEBUG nova.compute.manager [req-db58d86e-344f-444b-ae9d-e3bed229d77e req-2595314b-1d91-4faf-a394-d643f115a62c service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Refreshing instance network info cache due to event network-changed-077e00c6-2e89-440c-8653-8742862e8000. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 921.340480] env[63538]: DEBUG oslo_concurrency.lockutils [req-db58d86e-344f-444b-ae9d-e3bed229d77e req-2595314b-1d91-4faf-a394-d643f115a62c service nova] Acquiring lock "refresh_cache-b47925eb-3d97-415b-9410-2e325da5ce79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.340648] env[63538]: DEBUG oslo_concurrency.lockutils [req-db58d86e-344f-444b-ae9d-e3bed229d77e req-2595314b-1d91-4faf-a394-d643f115a62c service nova] Acquired lock "refresh_cache-b47925eb-3d97-415b-9410-2e325da5ce79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.340833] env[63538]: DEBUG nova.network.neutron [req-db58d86e-344f-444b-ae9d-e3bed229d77e req-2595314b-1d91-4faf-a394-d643f115a62c service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Refreshing network info cache for port 077e00c6-2e89-440c-8653-8742862e8000 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 921.352511] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 921.352511] env[63538]: value = "task-5101224" [ 921.352511] env[63538]: _type = "Task" [ 921.352511] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.364454] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101224, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.394681] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101223, 'name': CreateVM_Task, 'duration_secs': 0.478186} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.394866] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 921.395869] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.396110] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.396460] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 921.396741] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32b3ea96-a947-4e48-be1c-48d644cef884 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.400059] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7df76f-0a91-489f-8f0b-c15b3450b87b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.406663] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 921.406663] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bbd341-4b83-6ae5-c947-09c00d160aa8" [ 921.406663] env[63538]: _type = "Task" [ 921.406663] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.413049] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce79b81-c040-4aef-ab88-9fb6d4dda7c1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.425308] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bbd341-4b83-6ae5-c947-09c00d160aa8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.454751] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa26e1e-bdec-4173-b0d1-2694c6280991 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.463679] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14b6075-c19d-4bf4-ad52-f74214482a0b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.478598] env[63538]: DEBUG nova.compute.provider_tree [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.543769] env[63538]: INFO nova.compute.manager [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Rescuing [ 921.544136] env[63538]: DEBUG oslo_concurrency.lockutils [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquiring lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.544278] env[63538]: DEBUG oslo_concurrency.lockutils [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquired lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.544421] env[63538]: DEBUG nova.network.neutron [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 921.866279] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101224, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.919673] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bbd341-4b83-6ae5-c947-09c00d160aa8, 'name': SearchDatastore_Task, 'duration_secs': 0.023228} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.920086] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.920345] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 921.920594] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.920745] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.920936] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 921.921264] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ad545b6-fa49-4b7e-ba7c-b79775985969 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.932316] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 921.932602] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 921.933541] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ebffc6f-70c5-48dd-8f7b-f9129d29c9ed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.943501] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 921.943501] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5297118d-d607-d7d2-bcd4-24cedf8329c3" [ 921.943501] env[63538]: _type = "Task" [ 921.943501] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.956404] env[63538]: DEBUG nova.compute.manager [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 921.958987] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5297118d-d607-d7d2-bcd4-24cedf8329c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.985093] env[63538]: DEBUG nova.scheduler.client.report [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 922.000499] env[63538]: DEBUG nova.virt.hardware [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 922.000940] env[63538]: DEBUG 
nova.virt.hardware [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 922.001213] env[63538]: DEBUG nova.virt.hardware [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 922.001457] env[63538]: DEBUG nova.virt.hardware [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 922.001671] env[63538]: DEBUG nova.virt.hardware [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 922.001856] env[63538]: DEBUG nova.virt.hardware [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 922.002279] env[63538]: DEBUG nova.virt.hardware [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 922.002580] env[63538]: DEBUG nova.virt.hardware [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 922.002896] env[63538]: DEBUG nova.virt.hardware [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 922.003225] env[63538]: DEBUG nova.virt.hardware [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 922.003562] env[63538]: DEBUG nova.virt.hardware [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 922.005539] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b55cb1-e50b-4550-aac6-a215cb06c220 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.018566] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-793e0f23-636e-457c-a126-4a1bdd872932 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.144407] env[63538]: DEBUG nova.network.neutron [req-db58d86e-344f-444b-ae9d-e3bed229d77e req-2595314b-1d91-4faf-a394-d643f115a62c service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Updated VIF entry in instance network info cache for port 077e00c6-2e89-440c-8653-8742862e8000. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 922.144854] env[63538]: DEBUG nova.network.neutron [req-db58d86e-344f-444b-ae9d-e3bed229d77e req-2595314b-1d91-4faf-a394-d643f115a62c service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Updating instance_info_cache with network_info: [{"id": "d5a8d000-6995-4d04-b7ff-431a2456a13e", "address": "fa:16:3e:f8:6e:97", "network": {"id": "fbcfee83-91c7-4538-bbd4-d5c6159d6aa1", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1159869692", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5a8d000-69", "ovs_interfaceid": "d5a8d000-6995-4d04-b7ff-431a2456a13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "077e00c6-2e89-440c-8653-8742862e8000", "address": "fa:16:3e:8d:5e:fa", "network": {"id": "6f2c74bd-42d7-4ea5-a715-45fb77e37604", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-155494244", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.132", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap077e00c6-2e", "ovs_interfaceid": "077e00c6-2e89-440c-8653-8742862e8000", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.359543] env[63538]: DEBUG nova.network.neutron [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Updating instance_info_cache with network_info: [{"id": 
"312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "address": "fa:16:3e:0b:f9:6b", "network": {"id": "268f8bc3-cc14-4aad-aa44-87cd6e506681", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-818769698-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b78ab12ed0254cf4b3ccb7c231ca810d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap312c7ab1-93", "ovs_interfaceid": "312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.367708] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101224, 'name': CreateSnapshot_Task, 'duration_secs': 0.954697} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.368058] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 922.368820] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2861a8e4-ec01-4df7-a30f-2767f3f3ce73 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.458270] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5297118d-d607-d7d2-bcd4-24cedf8329c3, 'name': SearchDatastore_Task, 'duration_secs': 0.012994} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.459118] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfa59527-751f-48a9-b3fa-6fc408b0bf78 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.466480] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 922.466480] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aafcf8-1d22-39c2-1d94-b2efdb5f50bc" [ 922.466480] env[63538]: _type = "Task" [ 922.466480] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.475595] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aafcf8-1d22-39c2-1d94-b2efdb5f50bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.493915] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.575s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.493915] env[63538]: DEBUG nova.compute.manager [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 922.494839] env[63538]: DEBUG oslo_concurrency.lockutils [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.228s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.495263] env[63538]: DEBUG nova.objects.instance [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lazy-loading 'resources' on Instance uuid ade3cce6-5662-4199-96f4-398436f840d8 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.650674] env[63538]: DEBUG nova.network.neutron [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Successfully updated port: 733479ad-a05b-44cf-a265-b766fea3b2ba {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 922.650674] env[63538]: DEBUG oslo_concurrency.lockutils [req-db58d86e-344f-444b-ae9d-e3bed229d77e req-2595314b-1d91-4faf-a394-d643f115a62c service nova] Releasing lock "refresh_cache-b47925eb-3d97-415b-9410-2e325da5ce79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.759845] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Acquiring lock "f1838794-710c-4bea-9e73-f6912e1b69f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.760213] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lock "f1838794-710c-4bea-9e73-f6912e1b69f5" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.760459] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Acquiring lock "f1838794-710c-4bea-9e73-f6912e1b69f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.760687] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lock "f1838794-710c-4bea-9e73-f6912e1b69f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.760891] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lock "f1838794-710c-4bea-9e73-f6912e1b69f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.765944] env[63538]: INFO nova.compute.manager [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Terminating instance [ 922.768806] env[63538]: DEBUG nova.compute.manager [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 922.768806] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 922.769544] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbf31f0-65f2-43fe-83e3-9386d939e968 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.777649] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 922.777924] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-717536b5-4d1c-4a43-8e18-845572e22546 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.785082] env[63538]: DEBUG oslo_vmware.api [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Waiting for the task: (returnval){ [ 922.785082] env[63538]: value = "task-5101225" [ 922.785082] env[63538]: _type = "Task" [ 922.785082] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.798811] env[63538]: DEBUG oslo_vmware.api [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101225, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.862737] env[63538]: DEBUG oslo_concurrency.lockutils [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Releasing lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.893527] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 922.894244] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-bc907011-6f37-482f-87f3-40943840d35a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.904174] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 922.904174] env[63538]: value = "task-5101226" [ 922.904174] env[63538]: _type = "Task" [ 922.904174] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.916439] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101226, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.979267] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aafcf8-1d22-39c2-1d94-b2efdb5f50bc, 'name': SearchDatastore_Task, 'duration_secs': 0.044849} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.982798] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.983183] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] b47925eb-3d97-415b-9410-2e325da5ce79/b47925eb-3d97-415b-9410-2e325da5ce79.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 922.983809] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24cf4961-471f-41e1-9f9b-00fd1e5ba05f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.999291] env[63538]: DEBUG nova.compute.utils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 923.006022] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 923.006022] env[63538]: value = "task-5101227" [ 923.006022] env[63538]: _type = "Task" [ 923.006022] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.007617] env[63538]: DEBUG nova.compute.manager [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 923.007864] env[63538]: DEBUG nova.network.neutron [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 923.026352] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101227, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.088714] env[63538]: DEBUG nova.policy [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16fdc041f4c74e0ea76ee8984f9786f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a701618902d411b8af203fdbb1069be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 923.151455] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "refresh_cache-f5d92749-04d6-4935-8dc6-afb692222df0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.151612] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "refresh_cache-f5d92749-04d6-4935-8dc6-afb692222df0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.151765] env[63538]: DEBUG nova.network.neutron [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 923.296937] env[63538]: DEBUG oslo_vmware.api [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101225, 'name': PowerOffVM_Task, 'duration_secs': 0.214403} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.300588] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 923.301136] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 923.302348] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5fc7f6d-a083-435b-9589-2db1572107c0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.380256] env[63538]: DEBUG nova.compute.manager [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Received event network-vif-plugged-733479ad-a05b-44cf-a265-b766fea3b2ba {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 923.380647] env[63538]: DEBUG oslo_concurrency.lockutils [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] Acquiring lock "f5d92749-04d6-4935-8dc6-afb692222df0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.381016] env[63538]: DEBUG oslo_concurrency.lockutils [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] Lock "f5d92749-04d6-4935-8dc6-afb692222df0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.381694] env[63538]: DEBUG oslo_concurrency.lockutils [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] Lock "f5d92749-04d6-4935-8dc6-afb692222df0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.381937] env[63538]: DEBUG nova.compute.manager [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] No waiting events found dispatching network-vif-plugged-733479ad-a05b-44cf-a265-b766fea3b2ba {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 923.382150] env[63538]: WARNING nova.compute.manager [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Received unexpected event network-vif-plugged-733479ad-a05b-44cf-a265-b766fea3b2ba for instance with vm_state building and task_state spawning. 
[ 923.382330] env[63538]: DEBUG nova.compute.manager [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Received event network-changed-733479ad-a05b-44cf-a265-b766fea3b2ba {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 923.382483] env[63538]: DEBUG nova.compute.manager [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Refreshing instance network info cache due to event network-changed-733479ad-a05b-44cf-a265-b766fea3b2ba. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 923.382675] env[63538]: DEBUG oslo_concurrency.lockutils [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] Acquiring lock "refresh_cache-f5d92749-04d6-4935-8dc6-afb692222df0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.393931] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 923.394217] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 923.394530] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Deleting the datastore file [datastore2] f1838794-710c-4bea-9e73-f6912e1b69f5 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 923.395709] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81d5712d-dc9b-405b-bcc0-61f669e7cc93 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.411750] env[63538]: DEBUG oslo_vmware.api [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Waiting for the task: (returnval){ [ 923.411750] env[63538]: value = "task-5101229" [ 923.411750] env[63538]: _type = "Task" [ 923.411750] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.420466] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101226, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.429815] env[63538]: DEBUG oslo_vmware.api [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101229, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.489512] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 923.489512] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68f3cbef-c37e-4891-8bf8-146a01f91352 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.493091] env[63538]: DEBUG nova.network.neutron [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Successfully created port: 31cf3b33-b97d-4183-a21c-80e24e27351f {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 923.505664] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 923.505664] env[63538]: value = "task-5101230" [ 923.505664] env[63538]: _type = "Task" [ 923.505664] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.509534] env[63538]: DEBUG nova.compute.manager [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 923.519888] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101230, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.522122] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203a444f-6a73-4b8a-8e8e-e1d104fbff14 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.528935] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101227, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.536209] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43af69f-eb2f-45a3-b810-3713091ab349 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.578905] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27edb1ff-aee1-4dff-ac15-368e6dca9312 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.592119] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b52f3b-5296-4b7c-89ca-32d167933253 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.609754] env[63538]: DEBUG nova.compute.provider_tree [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.713712] env[63538]: DEBUG nova.network.neutron [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 923.918751] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101226, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.929544] env[63538]: DEBUG oslo_vmware.api [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101229, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.010572] env[63538]: DEBUG nova.network.neutron [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Updating instance_info_cache with network_info: [{"id": "733479ad-a05b-44cf-a265-b766fea3b2ba", "address": "fa:16:3e:0a:5a:53", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap733479ad-a0", "ovs_interfaceid": "733479ad-a05b-44cf-a265-b766fea3b2ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.025639] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101230, 'name': PowerOffVM_Task, 'duration_secs': 0.40151} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.025639] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 924.025811] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b627715-3861-4f2b-b6c9-f75c24d69093 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.032455] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101227, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.707686} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.033889] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] b47925eb-3d97-415b-9410-2e325da5ce79/b47925eb-3d97-415b-9410-2e325da5ce79.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 924.034139] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 924.034702] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3e3edce-e25a-4b5b-87e2-236537f193c4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.055926] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9c4ba0-d4b8-494c-9715-1e869c91c766 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.060982] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 924.060982] env[63538]: value = "task-5101231" [ 924.060982] env[63538]: _type = "Task" [ 924.060982] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.077216] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101231, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.104950] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 924.105791] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8487346a-8937-4c4e-9707-9457c48baab2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.112725] env[63538]: DEBUG nova.scheduler.client.report [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 924.117877] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 924.117877] env[63538]: value = "task-5101232" [ 924.117877] env[63538]: _type = "Task" [ 924.117877] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.128720] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 924.128969] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 924.129260] env[63538]: DEBUG oslo_concurrency.lockutils [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.129421] env[63538]: DEBUG oslo_concurrency.lockutils [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.129611] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 924.130567] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d201d28-3bb4-4bec-8081-fa1662bae862 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.139889] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 924.140158] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 924.141236] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6ece12e-60dd-43f7-b3b4-1257d40f2c1c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.147340] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 924.147340] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52027305-63f5-12f5-9cf2-f187ea6dd840" [ 924.147340] env[63538]: _type = "Task" [ 924.147340] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.157154] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52027305-63f5-12f5-9cf2-f187ea6dd840, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.416534] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101226, 'name': CloneVM_Task, 'duration_secs': 1.391151} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.416856] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Created linked-clone VM from snapshot [ 924.417659] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d29257f-7f7a-420c-aee5-50a6d9197bcb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.428100] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Uploading image a24c0f28-e6a9-4f20-ac43-e49948a73214 {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 924.433151] env[63538]: DEBUG oslo_vmware.api [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Task: {'id': task-5101229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.528959} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.433356] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 924.433544] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 924.433722] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 924.433896] env[63538]: INFO nova.compute.manager [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Took 1.67 seconds to destroy the instance on the hypervisor. [ 924.434157] env[63538]: DEBUG oslo.service.loopingcall [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 924.434359] env[63538]: DEBUG nova.compute.manager [-] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 924.434453] env[63538]: DEBUG nova.network.neutron [-] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 924.449421] env[63538]: DEBUG oslo_vmware.rw_handles [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 924.449421] env[63538]: value = "vm-992442" [ 924.449421] env[63538]: _type = "VirtualMachine" [ 924.449421] env[63538]: }. 
{{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 924.449697] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a11c39cd-6340-4d2a-b85e-ebdb9f859e6b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.457619] env[63538]: DEBUG oslo_vmware.rw_handles [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lease: (returnval){ [ 924.457619] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5244c4d5-ae07-66a9-756a-4e700c118cc5" [ 924.457619] env[63538]: _type = "HttpNfcLease" [ 924.457619] env[63538]: } obtained for exporting VM: (result){ [ 924.457619] env[63538]: value = "vm-992442" [ 924.457619] env[63538]: _type = "VirtualMachine" [ 924.457619] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 924.457959] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the lease: (returnval){ [ 924.457959] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5244c4d5-ae07-66a9-756a-4e700c118cc5" [ 924.457959] env[63538]: _type = "HttpNfcLease" [ 924.457959] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 924.464753] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 924.464753] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5244c4d5-ae07-66a9-756a-4e700c118cc5" [ 924.464753] env[63538]: _type = "HttpNfcLease" [ 924.464753] env[63538]: } is initializing. 
{{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 924.513910] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "refresh_cache-f5d92749-04d6-4935-8dc6-afb692222df0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.514292] env[63538]: DEBUG nova.compute.manager [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Instance network_info: |[{"id": "733479ad-a05b-44cf-a265-b766fea3b2ba", "address": "fa:16:3e:0a:5a:53", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap733479ad-a0", "ovs_interfaceid": "733479ad-a05b-44cf-a265-b766fea3b2ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 924.514643] env[63538]: DEBUG oslo_concurrency.lockutils [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] Acquired lock "refresh_cache-f5d92749-04d6-4935-8dc6-afb692222df0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.514926] env[63538]: DEBUG nova.network.neutron [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Refreshing network info cache for port 733479ad-a05b-44cf-a265-b766fea3b2ba {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 924.516389] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:5a:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '733479ad-a05b-44cf-a265-b766fea3b2ba', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 924.524521] env[63538]: DEBUG oslo.service.loopingcall [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 924.525741] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 924.526896] env[63538]: DEBUG nova.compute.manager [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 924.529284] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f748df57-d0ad-4a65-ae92-2be1a4235ce6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.552649] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 924.552649] env[63538]: value = "task-5101234" [ 924.552649] env[63538]: _type = "Task" [ 924.552649] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.555028] env[63538]: DEBUG nova.virt.hardware [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 924.555294] env[63538]: DEBUG nova.virt.hardware [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 924.555459] env[63538]: DEBUG nova.virt.hardware [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.555644] env[63538]: DEBUG nova.virt.hardware [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 924.555794] env[63538]: DEBUG nova.virt.hardware [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image 
pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.555971] env[63538]: DEBUG nova.virt.hardware [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 924.556224] env[63538]: DEBUG nova.virt.hardware [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 924.556423] env[63538]: DEBUG nova.virt.hardware [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 924.556564] env[63538]: DEBUG nova.virt.hardware [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 924.556729] env[63538]: DEBUG nova.virt.hardware [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 924.556906] env[63538]: DEBUG nova.virt.hardware [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 924.557896] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60a1927-29f5-4fde-8a7b-28a28d8f7cba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.574119] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51df7d6f-82b4-4f49-9397-8d38fc63c49f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.578164] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101234, 'name': CreateVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.581964] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101231, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073359} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.582689] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 924.583525] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ea2d73-5cd9-4e23-8dd3-d231cf77f19d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.621285] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] b47925eb-3d97-415b-9410-2e325da5ce79/b47925eb-3d97-415b-9410-2e325da5ce79.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 924.622546] env[63538]: DEBUG oslo_concurrency.lockutils [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.128s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.625309] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d13d6fa-f562-481e-9b56-8a5bfc6cb53e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.640832] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.288s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.640994] env[63538]: DEBUG nova.objects.instance [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lazy-loading 'resources' on Instance uuid 8fb62f47-cbf2-4b46-bc33-845e832f9ef0 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.648978] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 924.648978] env[63538]: value = "task-5101235" [ 924.648978] env[63538]: _type = "Task" [ 924.648978] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.662630] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52027305-63f5-12f5-9cf2-f187ea6dd840, 'name': SearchDatastore_Task, 'duration_secs': 0.011219} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.667257] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101235, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.668309] env[63538]: INFO nova.scheduler.client.report [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleted allocations for instance ade3cce6-5662-4199-96f4-398436f840d8 [ 924.669396] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f312bd99-d125-4078-8fab-324628d7375f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.679230] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 924.679230] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5295364b-bf2a-3fe0-77ab-bb4f992e9317" [ 924.679230] env[63538]: _type = "Task" [ 924.679230] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.691352] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5295364b-bf2a-3fe0-77ab-bb4f992e9317, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.967042] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 924.967042] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5244c4d5-ae07-66a9-756a-4e700c118cc5" [ 924.967042] env[63538]: _type = "HttpNfcLease" [ 924.967042] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 924.967517] env[63538]: DEBUG oslo_vmware.rw_handles [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 924.967517] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5244c4d5-ae07-66a9-756a-4e700c118cc5" [ 924.967517] env[63538]: _type = "HttpNfcLease" [ 924.967517] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 924.968222] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b100f1e7-6808-4ac1-88f8-8876fb2c0042 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.976974] env[63538]: DEBUG oslo_vmware.rw_handles [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52861c08-772a-46af-1e24-33fee86d127d/disk-0.vmdk from lease info. 
{{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 924.977209] env[63538]: DEBUG oslo_vmware.rw_handles [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52861c08-772a-46af-1e24-33fee86d127d/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 925.067491] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101234, 'name': CreateVM_Task, 'duration_secs': 0.378679} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.069892] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 925.070969] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.071180] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.071553] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 925.071830] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fb266583-e523-4d27-a787-6e576d60518b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.073561] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d1786b8-4da2-43de-a371-92795512a756 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.083038] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 925.083038] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f05684-400f-996a-ea15-9dfe9fcd2722" [ 925.083038] env[63538]: _type = "Task" [ 925.083038] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.096454] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f05684-400f-996a-ea15-9dfe9fcd2722, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.163445] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101235, 'name': ReconfigVM_Task, 'duration_secs': 0.377206} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.163877] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Reconfigured VM instance instance-0000004b to attach disk [datastore2] b47925eb-3d97-415b-9410-2e325da5ce79/b47925eb-3d97-415b-9410-2e325da5ce79.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 925.164573] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44e1e105-d562-4771-9efc-8c79576e9a24 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.172042] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 925.172042] env[63538]: value = "task-5101236" [ 925.172042] env[63538]: _type = "Task" [ 925.172042] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.177860] env[63538]: DEBUG oslo_concurrency.lockutils [None req-334b1af8-4f4b-4aa7-8eac-c923b395a44d tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "ade3cce6-5662-4199-96f4-398436f840d8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.406s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.188617] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101236, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.201332] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5295364b-bf2a-3fe0-77ab-bb4f992e9317, 'name': SearchDatastore_Task, 'duration_secs': 0.011633} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.205023] env[63538]: DEBUG oslo_concurrency.lockutils [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.205023] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] e447c109-4cef-4cc7-9acf-61abc0f47482/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk. {{(pid=63538) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 925.205023] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3108763-2ee6-4e96-aa30-bb46e7580213 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.213582] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 925.213582] env[63538]: value = "task-5101237" [ 925.213582] env[63538]: _type = "Task" [ 925.213582] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.228382] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101237, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.276543] env[63538]: DEBUG nova.network.neutron [-] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.414623] env[63538]: DEBUG nova.compute.manager [req-f9d36dde-93d9-4333-b972-02e3f49d2437 req-53c348fe-d651-4bc3-bba1-0069b58c70ba service nova] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Received event network-vif-deleted-3a12b458-0ee6-4994-89a1-0b04d9f01da3 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 925.598894] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f05684-400f-996a-ea15-9dfe9fcd2722, 'name': SearchDatastore_Task, 'duration_secs': 0.027588} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.599424] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.599902] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.600372] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.600459] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.600732] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.601083] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27f20541-1763-4eab-8e41-798b9be60f76 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.621805] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.622384] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 925.625744] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2a4dca7-6042-4f01-839c-ce02ce12d18b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.632623] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 925.632623] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5296b202-f4e0-bea5-a4f7-2db10807c555" [ 925.632623] env[63538]: _type = "Task" [ 925.632623] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.643864] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5296b202-f4e0-bea5-a4f7-2db10807c555, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.687139] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101236, 'name': Rename_Task, 'duration_secs': 0.178362} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.688188] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 925.688188] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8275fb5f-1cc3-41f8-ab24-f7902143c28a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.698800] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 925.698800] env[63538]: value = "task-5101238" [ 925.698800] env[63538]: _type = "Task" [ 925.698800] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.712873] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101238, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.729901] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101237, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.768110] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17709779-7210-47a6-bfca-47833d292769 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.778226] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ce8e2a-1040-4250-a4cb-4d61f485fe01 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.783561] env[63538]: INFO nova.compute.manager [-] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Took 1.35 seconds to deallocate network for instance. [ 925.828108] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14584204-e16c-467b-a644-d6d08490a77e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.833505] env[63538]: DEBUG nova.network.neutron [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Updated VIF entry in instance network info cache for port 733479ad-a05b-44cf-a265-b766fea3b2ba. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 925.833914] env[63538]: DEBUG nova.network.neutron [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Updating instance_info_cache with network_info: [{"id": "733479ad-a05b-44cf-a265-b766fea3b2ba", "address": "fa:16:3e:0a:5a:53", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap733479ad-a0", "ovs_interfaceid": "733479ad-a05b-44cf-a265-b766fea3b2ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.839693] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f299ae1c-7043-465c-9f05-91bdd5637410 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.860480] env[63538]: DEBUG nova.compute.provider_tree [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.998294] env[63538]: DEBUG nova.network.neutron [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Successfully updated port: 31cf3b33-b97d-4183-a21c-80e24e27351f {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 926.145728] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5296b202-f4e0-bea5-a4f7-2db10807c555, 'name': SearchDatastore_Task, 'duration_secs': 0.037784} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.147056] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c8a58f8-22b8-4e9a-8577-41497caf033e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.155208] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 926.155208] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52049365-ef0f-0b53-6507-57a48460b141" [ 926.155208] env[63538]: _type = "Task" [ 926.155208] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.166769] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52049365-ef0f-0b53-6507-57a48460b141, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.210474] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101238, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.227410] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101237, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649785} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.227890] env[63538]: INFO nova.virt.vmwareapi.ds_util [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] e447c109-4cef-4cc7-9acf-61abc0f47482/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk. 
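
The "Acquiring lock / Acquired external semaphore / Releasing lock" records around the devstack-image-cache_base entries above reflect the per-image serialization that oslo.concurrency provides while the cached VMDK is checked or populated. A minimal sketch of that pattern follows; the helper name ensure_cached_image, the populate callable, and the use of the datastore path as the lock key are illustrative, not Nova's exact code.

    from oslo_concurrency import lockutils

    # Lock name copied from the records above; any string that is unique per
    # cached image works as a lock key.
    CACHE_LOCK = ("[datastore1] devstack-image-cache_base/"
                  "faabbca4-e27b-433a-b93d-f059fd73bc92")

    def ensure_cached_image(populate):
        """Run `populate` while holding the per-image cache lock."""
        # lockutils.lock() is a context manager guarding an in-process
        # semaphore. Nova additionally takes an external (file-based) lock,
        # which is what the "Acquired external semaphore" record refers to;
        # that needs a configured lock_path and is omitted from this sketch.
        with lockutils.lock(CACHE_LOCK):
            populate()

    # Usage sketch:
    # ensure_cached_image(lambda: print("copy VMDK into the image cache"))
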
[ 926.228760] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f2a62d-d031-4539-8e4b-5dc469a776ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.255304] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] e447c109-4cef-4cc7-9acf-61abc0f47482/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 926.255654] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bb8558b-c297-4962-af93-1749352bd0ee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.275157] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 926.275157] env[63538]: value = "task-5101239" [ 926.275157] env[63538]: _type = "Task" [ 926.275157] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.283870] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101239, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.298521] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.339472] env[63538]: DEBUG oslo_concurrency.lockutils [req-08138ea7-3dbe-4d2c-8e16-45a0378fa32b req-15b151eb-febc-4f5e-b2fb-a76eba971919 service nova] Releasing lock "refresh_cache-f5d92749-04d6-4935-8dc6-afb692222df0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.364526] env[63538]: DEBUG nova.scheduler.client.report [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 926.501174] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.501446] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.501686] env[63538]: DEBUG nova.network.neutron [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 926.667795] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52049365-ef0f-0b53-6507-57a48460b141, 'name': SearchDatastore_Task, 'duration_secs': 0.011573} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.668211] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.668519] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] f5d92749-04d6-4935-8dc6-afb692222df0/f5d92749-04d6-4935-8dc6-afb692222df0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 926.668829] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db669a18-0c8b-4a6a-b7c4-12bdcd00017e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.676080] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 926.676080] env[63538]: value = "task-5101240" [ 926.676080] env[63538]: _type = "Task" [ 926.676080] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.684585] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101240, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.714508] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101238, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.786117] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101239, 'name': ReconfigVM_Task, 'duration_secs': 0.323649} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.786966] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Reconfigured VM instance instance-0000004a to attach disk [datastore2] e447c109-4cef-4cc7-9acf-61abc0f47482/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.788067] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027cdb19-78dc-4c7a-a921-7b3b72c5c619 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.815338] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e72bcc17-8fce-4e46-945e-d1df58f572cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.834599] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 926.834599] env[63538]: value = "task-5101241" [ 926.834599] env[63538]: _type = "Task" [ 926.834599] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.844932] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101241, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.870430] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.229s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.873493] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.188s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.875486] env[63538]: INFO nova.compute.claims [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 926.900837] env[63538]: INFO nova.scheduler.client.report [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleted allocations for instance 8fb62f47-cbf2-4b46-bc33-845e832f9ef0 [ 927.058196] env[63538]: DEBUG nova.network.neutron [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 927.187616] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101240, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.216271] env[63538]: DEBUG oslo_vmware.api [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101238, 'name': PowerOnVM_Task, 'duration_secs': 1.409154} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.216605] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 927.216843] env[63538]: INFO nova.compute.manager [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Took 12.26 seconds to spawn the instance on the hypervisor. 
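
The PowerOnVM_Task records just above (progress 0% → 66% → completed successfully, followed by "Powered on the VM") follow oslo.vmware's invoke-then-poll pattern: a vSphere *_Task method is called through the session and the returned task is polled by wait_for_task(), which emits the progress records seen in this log. A minimal sketch, assuming an already-created VMwareAPISession and a VM managed-object reference supplied by the caller:

    from oslo_vmware import exceptions as vexc

    def power_on(session, vm_ref):
        """Power on a VM and block until the vCenter task finishes."""
        # invoke_api() issues the SOAP call (PowerOnVM_Task on the VM managed
        # object); wait_for_task() polls it, producing the "progress is N%"
        # and "completed successfully" records seen above.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        try:
            return session.wait_for_task(task)
        except vexc.VimException:
            # A failed task surfaces here instead of "completed successfully".
            raise
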
[ 927.217024] env[63538]: DEBUG nova.compute.manager [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 927.218015] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c98ab0-97e0-4646-9abf-a725072caa69 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.327647] env[63538]: DEBUG nova.network.neutron [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance_info_cache with network_info: [{"id": "31cf3b33-b97d-4183-a21c-80e24e27351f", "address": "fa:16:3e:e0:a1:3e", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31cf3b33-b9", "ovs_interfaceid": "31cf3b33-b97d-4183-a21c-80e24e27351f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.351243] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101241, 'name': ReconfigVM_Task, 'duration_secs': 0.434446} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.351631] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 927.352313] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ce1d408-f25c-4192-9ed8-b6b79089719c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.362564] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 927.362564] env[63538]: value = "task-5101242" [ 927.362564] env[63538]: _type = "Task" [ 927.362564] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.374315] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101242, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.411410] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2da88adf-f9c3-463f-99cc-a5578a41bedc tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "8fb62f47-cbf2-4b46-bc33-845e832f9ef0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.347s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.568927] env[63538]: DEBUG nova.compute.manager [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Received event network-vif-plugged-31cf3b33-b97d-4183-a21c-80e24e27351f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 927.568979] env[63538]: DEBUG oslo_concurrency.lockutils [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] Acquiring lock "4ec5d3a2-8b29-4074-b323-f94704043b8b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.571598] env[63538]: DEBUG oslo_concurrency.lockutils [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] Lock "4ec5d3a2-8b29-4074-b323-f94704043b8b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.571598] env[63538]: DEBUG oslo_concurrency.lockutils [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] Lock "4ec5d3a2-8b29-4074-b323-f94704043b8b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.571598] env[63538]: DEBUG nova.compute.manager [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] No waiting events found dispatching network-vif-plugged-31cf3b33-b97d-4183-a21c-80e24e27351f {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 927.571598] env[63538]: WARNING nova.compute.manager [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Received unexpected event network-vif-plugged-31cf3b33-b97d-4183-a21c-80e24e27351f for instance with vm_state building and task_state spawning. [ 927.571598] env[63538]: DEBUG nova.compute.manager [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Received event network-changed-31cf3b33-b97d-4183-a21c-80e24e27351f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 927.571598] env[63538]: DEBUG nova.compute.manager [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Refreshing instance network info cache due to event network-changed-31cf3b33-b97d-4183-a21c-80e24e27351f. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 927.571598] env[63538]: DEBUG oslo_concurrency.lockutils [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] Acquiring lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.635643] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.636059] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.636291] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.636925] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.637173] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.639497] env[63538]: INFO nova.compute.manager [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Terminating instance [ 927.641374] env[63538]: DEBUG nova.compute.manager [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 927.641598] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 927.642465] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a2d2a3-f426-4865-8d47-b8b37b6c51c0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.651124] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 927.651399] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-088f5bfb-8e3a-41c2-a9de-2e59d1d010f0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.659021] env[63538]: DEBUG oslo_vmware.api [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 927.659021] env[63538]: value = "task-5101243" [ 927.659021] env[63538]: _type = "Task" [ 927.659021] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.669549] env[63538]: DEBUG oslo_vmware.api [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101243, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.688521] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101240, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52674} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.688806] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] f5d92749-04d6-4935-8dc6-afb692222df0/f5d92749-04d6-4935-8dc6-afb692222df0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 927.689049] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 927.689334] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b723dfdb-9134-4bde-aab2-6ad8298c6d9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.698185] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 927.698185] env[63538]: value = "task-5101244" [ 927.698185] env[63538]: _type = "Task" [ 927.698185] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.707906] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101244, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.744981] env[63538]: INFO nova.compute.manager [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Took 51.53 seconds to build instance. 
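
The CopyVirtualDisk_Task and ExtendVirtualDisk_Task records above show the root disk being produced server-side from the cached image before the VM is reconfigured to attach it. A sketch of the copy step, assuming `session` is an oslo.vmware VMwareAPISession and `dc_ref` the datacenter managed-object reference; the example paths are taken from the log, and the keyword parameter names are assumed to match the vSphere CopyVirtualDisk_Task API:

    def copy_cached_disk(session, dc_ref, src, dst):
        """Server-side copy of a cached VMDK to the instance directory."""
        # The VirtualDiskManager performs the copy inside vCenter/ESX; no
        # image data flows through the compute host.
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, "CopyVirtualDisk_Task", vdm,
                                  sourceName=src, sourceDatacenter=dc_ref,
                                  destName=dst, destDatacenter=dc_ref)
        return session.wait_for_task(task)

    # Example arguments, taken from the records above:
    # src = ("[datastore1] devstack-image-cache_base/"
    #        "faabbca4-e27b-433a-b93d-f059fd73bc92/"
    #        "faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk")
    # dst = ("[datastore1] f5d92749-04d6-4935-8dc6-afb692222df0/"
    #        "f5d92749-04d6-4935-8dc6-afb692222df0.vmdk")
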
[ 927.830680] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.831171] env[63538]: DEBUG nova.compute.manager [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Instance network_info: |[{"id": "31cf3b33-b97d-4183-a21c-80e24e27351f", "address": "fa:16:3e:e0:a1:3e", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31cf3b33-b9", "ovs_interfaceid": "31cf3b33-b97d-4183-a21c-80e24e27351f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 927.831630] env[63538]: DEBUG oslo_concurrency.lockutils [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] Acquired lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.831908] env[63538]: DEBUG nova.network.neutron [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Refreshing network info cache for port 31cf3b33-b97d-4183-a21c-80e24e27351f {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 927.833646] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:a1:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '31cf3b33-b97d-4183-a21c-80e24e27351f', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 927.841721] env[63538]: DEBUG oslo.service.loopingcall [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 927.843535] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 927.844088] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-257b1c31-852e-40ba-897c-5051422044f6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.865655] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 927.865655] env[63538]: value = "task-5101245" [ 927.865655] env[63538]: _type = "Task" [ 927.865655] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.879324] env[63538]: DEBUG oslo_vmware.api [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101242, 'name': PowerOnVM_Task, 'duration_secs': 0.435136} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.883605] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 927.886190] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101245, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.887875] env[63538]: DEBUG nova.compute.manager [None req-188bd08d-cb21-4fd2-84c9-bd2a4c0c5732 tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 927.891522] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baea8575-a63e-4c3d-9709-66ca4e6f8323 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.173803] env[63538]: DEBUG oslo_vmware.api [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101243, 'name': PowerOffVM_Task, 'duration_secs': 0.219691} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.174191] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 928.174277] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 928.174541] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5134d57c-f97a-45ff-b844-c96f5a06d1c7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.210550] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101244, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083938} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.210888] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 928.211798] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2365b7d7-bd08-4a9d-b550-6a7c51d41032 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.239576] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] f5d92749-04d6-4935-8dc6-afb692222df0/f5d92749-04d6-4935-8dc6-afb692222df0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.243082] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb402412-3397-4015-90ed-b6e69daf2131 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.259062] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5648f11e-81d8-46f1-af42-c7414576bc53 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "b47925eb-3d97-415b-9410-2e325da5ce79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.412s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.266175] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 928.266175] 
env[63538]: value = "task-5101247" [ 928.266175] env[63538]: _type = "Task" [ 928.266175] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.284532] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101247, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.327196] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7aa75a0-0a6b-4ab0-91df-b0f1314122e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.335548] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c57db63-ec49-4dc4-a98b-4d3aa3121d52 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.369063] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9eefd7f-82c5-49f4-bd5a-a4766563a4db {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.382914] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc6c1dc-44c3-41a5-8bc4-dc382216d316 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.386642] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101245, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.397075] env[63538]: DEBUG nova.compute.provider_tree [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.582569] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 928.582900] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 928.583103] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleting the datastore file [datastore1] c8a02fa6-5232-4dde-b6dd-0da1089b6bbf {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 928.585924] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c71fa380-92cb-445c-b0d6-95bb70259fad {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.595417] env[63538]: DEBUG oslo_vmware.api [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for the task: (returnval){ [ 928.595417] env[63538]: value = "task-5101248" [ 928.595417] env[63538]: _type = "Task" [ 928.595417] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.605868] env[63538]: DEBUG oslo_vmware.api [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101248, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.636707] env[63538]: DEBUG nova.network.neutron [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updated VIF entry in instance network info cache for port 31cf3b33-b97d-4183-a21c-80e24e27351f. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 928.637120] env[63538]: DEBUG nova.network.neutron [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance_info_cache with network_info: [{"id": "31cf3b33-b97d-4183-a21c-80e24e27351f", "address": "fa:16:3e:e0:a1:3e", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31cf3b33-b9", "ovs_interfaceid": "31cf3b33-b97d-4183-a21c-80e24e27351f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.780159] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.881798] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101245, 'name': CreateVM_Task, 'duration_secs': 0.713367} completed successfully. 
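[editor's note] The "Updating instance_info_cache with network_info: [...]" entry above shows the shape of the per-instance network cache Nova keeps (VIF id, MAC address, network.subnets[].ips, OVS details). A minimal plain-Python sketch of reading that structure follows; the field names are taken from the entry above, the helper name is made up for illustration and is not Nova's code.

    # Hypothetical helper: extract the MAC and fixed IPs per VIF from a
    # network_info list shaped like the cache entries in this log.
    import json

    def fixed_ips_by_vif(network_info):
        """Return {vif_id: {"mac": ..., "ips": [...]}} for a network_info list."""
        result = {}
        for vif in network_info:
            ips = []
            for subnet in vif.get("network", {}).get("subnets", []):
                for ip in subnet.get("ips", []):
                    if ip.get("type") == "fixed":
                        ips.append(ip["address"])
            result[vif["id"]] = {"mac": vif.get("address"), "ips": ips}
        return result

    if __name__ == "__main__":
        # One VIF, trimmed down from the cache entry logged above.
        sample = [{
            "id": "31cf3b33-b97d-4183-a21c-80e24e27351f",
            "address": "fa:16:3e:e0:a1:3e",
            "network": {"subnets": [{"ips": [{"address": "192.168.128.4",
                                              "type": "fixed"}]}]},
        }]
        print(json.dumps(fixed_ips_by_vif(sample), indent=2))
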
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.881978] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 928.882710] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.882886] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.883294] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 928.883571] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9a4d7bc-276f-4810-9634-c862a5fcfcee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.889212] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 928.889212] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bff62c-1632-e991-6482-ffb0bf2f9a39" [ 928.889212] env[63538]: _type = "Task" [ 928.889212] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.898419] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bff62c-1632-e991-6482-ffb0bf2f9a39, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.901460] env[63538]: DEBUG nova.scheduler.client.report [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 929.106534] env[63538]: DEBUG oslo_vmware.api [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Task: {'id': task-5101248, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308128} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.106748] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 929.106798] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 929.106986] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 929.107190] env[63538]: INFO nova.compute.manager [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Took 1.47 seconds to destroy the instance on the hypervisor. [ 929.107499] env[63538]: DEBUG oslo.service.loopingcall [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
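[editor's note] The "Waiting for function ... _deallocate_network_with_retries to return" entry above comes from oslo.service's looping-call machinery (loopingcall.py). A short sketch of that general retry pattern follows, assuming oslo.service is installed; the body and the interval/attempt numbers are invented for the example and are not Nova's actual retry settings, only the loopingcall API usage reflects the real library.

    # Illustrative oslo.service looping-call pattern: poll a function until it
    # signals completion by raising LoopingCallDone.
    from oslo_service import loopingcall

    attempts = {"n": 0}

    def _deallocate_with_retries():
        attempts["n"] += 1
        if attempts["n"] >= 3:           # pretend the third try succeeds
            # Raising LoopingCallDone stops the loop; .wait() returns retvalue.
            raise loopingcall.LoopingCallDone(retvalue=attempts["n"])

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    tries = timer.start(interval=1.0).wait()
    print("deallocated after %d attempts" % tries)
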
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 929.108152] env[63538]: DEBUG nova.compute.manager [-] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 929.108259] env[63538]: DEBUG nova.network.neutron [-] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 929.141445] env[63538]: DEBUG oslo_concurrency.lockutils [req-1f7b6d2d-ec3f-45a4-ada4-79c559069958 req-55f596a3-952c-4119-88c1-e4d37ab1b974 service nova] Releasing lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.279229] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101247, 'name': ReconfigVM_Task, 'duration_secs': 0.752548} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.279636] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Reconfigured VM instance instance-0000004c to attach disk [datastore1] f5d92749-04d6-4935-8dc6-afb692222df0/f5d92749-04d6-4935-8dc6-afb692222df0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.280706] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4e8fd209-33ac-4f4e-820b-43c1d5077747 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.288887] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 929.288887] env[63538]: value = "task-5101249" [ 929.288887] env[63538]: _type = "Task" [ 929.288887] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.301135] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101249, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.352907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "b47925eb-3d97-415b-9410-2e325da5ce79" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.352907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "b47925eb-3d97-415b-9410-2e325da5ce79" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.352907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "b47925eb-3d97-415b-9410-2e325da5ce79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.352907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "b47925eb-3d97-415b-9410-2e325da5ce79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.352907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "b47925eb-3d97-415b-9410-2e325da5ce79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.355313] env[63538]: INFO nova.compute.manager [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Terminating instance [ 929.358877] env[63538]: DEBUG nova.compute.manager [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Start destroying the instance on the hypervisor. 
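[editor's note] The block of 'Acquiring lock "<uuid>" ... acquired ... "released" ... held N s' entries above is oslo.concurrency's lockutils logging. A minimal sketch of the two usual call styles follows; the lock names are copied from the log, but the functions themselves are placeholders, not Nova code.

    from oslo_concurrency import lockutils

    INSTANCE_UUID = "b47925eb-3d97-415b-9410-2e325da5ce79"  # taken from the log

    # Decorator form: only one caller at a time may run per lock name;
    # lockutils emits the acquire/release DEBUG lines seen in this log.
    @lockutils.synchronized(INSTANCE_UUID)
    def do_terminate_instance():
        print("terminating under lock", INSTANCE_UUID)

    do_terminate_instance()

    # Context-manager form, e.g. for the per-instance "-events" lock above.
    with lockutils.lock(INSTANCE_UUID + "-events"):
        print("clearing events under lock")
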
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 929.358877] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 929.358877] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80955e86-4f7b-4769-85de-fb0d67699900 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.369150] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 929.369150] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f3c456c-c1fa-4c2b-a4cd-dbe89b580a39 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.375410] env[63538]: DEBUG oslo_vmware.api [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 929.375410] env[63538]: value = "task-5101250" [ 929.375410] env[63538]: _type = "Task" [ 929.375410] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.384889] env[63538]: DEBUG oslo_vmware.api [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101250, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.400840] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bff62c-1632-e991-6482-ffb0bf2f9a39, 'name': SearchDatastore_Task, 'duration_secs': 0.013683} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.401232] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.401521] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 929.401828] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.402046] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.402284] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 929.402618] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-debcc4c1-bbb6-4be9-ae74-36a5cf04c3b4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.406946] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.534s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.408108] env[63538]: DEBUG nova.compute.manager [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 929.411821] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.529s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.413482] env[63538]: INFO nova.compute.claims [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 929.422425] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 929.422795] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 929.426768] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-418d1255-8c48-4a95-903f-59274d1efc48 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.434796] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 929.434796] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ac29e6-27af-39a2-3a5a-1050376fc705" [ 929.434796] env[63538]: _type = "Task" [ 929.434796] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.450250] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ac29e6-27af-39a2-3a5a-1050376fc705, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.661816] env[63538]: DEBUG nova.compute.manager [req-10dad7a4-2a89-441c-8fd3-17c25a8d0fd0 req-8af0f902-3f85-4522-87cb-da736bcd4f26 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Received event network-changed-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 929.663964] env[63538]: DEBUG nova.compute.manager [req-10dad7a4-2a89-441c-8fd3-17c25a8d0fd0 req-8af0f902-3f85-4522-87cb-da736bcd4f26 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Refreshing instance network info cache due to event network-changed-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 929.663964] env[63538]: DEBUG oslo_concurrency.lockutils [req-10dad7a4-2a89-441c-8fd3-17c25a8d0fd0 req-8af0f902-3f85-4522-87cb-da736bcd4f26 service nova] Acquiring lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.663964] env[63538]: DEBUG oslo_concurrency.lockutils [req-10dad7a4-2a89-441c-8fd3-17c25a8d0fd0 req-8af0f902-3f85-4522-87cb-da736bcd4f26 service nova] Acquired lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.663964] env[63538]: DEBUG nova.network.neutron [req-10dad7a4-2a89-441c-8fd3-17c25a8d0fd0 req-8af0f902-3f85-4522-87cb-da736bcd4f26 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Refreshing network info cache for port 312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 929.802622] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101249, 'name': Rename_Task, 'duration_secs': 0.254453} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.803223] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 929.803396] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4ddf592-e9ef-440c-8e78-7d3eb1548034 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.813910] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 929.813910] env[63538]: value = "task-5101251" [ 929.813910] env[63538]: _type = "Task" [ 929.813910] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.825025] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101251, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.886910] env[63538]: DEBUG oslo_vmware.api [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101250, 'name': PowerOffVM_Task, 'duration_secs': 0.365769} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.887430] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 929.887639] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 929.887933] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-080e41d2-c677-49de-a22c-df21ceff2ce3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.893144] env[63538]: DEBUG nova.compute.manager [req-f8d35794-11eb-4a65-a33a-c7232038280d req-8de724c8-e226-46bb-8f16-90fbc7bb28f3 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Received event network-changed-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 929.893275] env[63538]: DEBUG nova.compute.manager [req-f8d35794-11eb-4a65-a33a-c7232038280d req-8de724c8-e226-46bb-8f16-90fbc7bb28f3 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Refreshing instance network info cache due to event network-changed-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 929.893454] env[63538]: DEBUG oslo_concurrency.lockutils [req-f8d35794-11eb-4a65-a33a-c7232038280d req-8de724c8-e226-46bb-8f16-90fbc7bb28f3 service nova] Acquiring lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.922866] env[63538]: DEBUG nova.compute.utils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 929.928425] env[63538]: DEBUG nova.compute.manager [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 929.928581] env[63538]: DEBUG nova.network.neutron [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 929.949134] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ac29e6-27af-39a2-3a5a-1050376fc705, 'name': SearchDatastore_Task, 'duration_secs': 0.013925} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.950452] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83e99dea-e6f4-4040-8e04-0a7ae2b8417a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.958127] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 929.958127] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d06b15-e752-fdb3-1aaf-55594affe8c1" [ 929.958127] env[63538]: _type = "Task" [ 929.958127] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.969709] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d06b15-e752-fdb3-1aaf-55594affe8c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.015614] env[63538]: DEBUG nova.policy [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87c19c9ce3594acd96c1c215ef8ea555', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '422f50dc66ec48b7b262643390072f3d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 930.030582] env[63538]: DEBUG nova.network.neutron [-] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.277557] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 930.277821] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 930.278026] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Deleting the datastore file [datastore2] b47925eb-3d97-415b-9410-2e325da5ce79 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 930.278381] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-ffb1fe71-fb63-4d66-ac5f-e78c246d632e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.286484] env[63538]: DEBUG oslo_vmware.api [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for the task: (returnval){ [ 930.286484] env[63538]: value = "task-5101253" [ 930.286484] env[63538]: _type = "Task" [ 930.286484] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.301431] env[63538]: DEBUG oslo_vmware.api [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101253, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.330148] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101251, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.431936] env[63538]: DEBUG nova.compute.manager [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 930.476406] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d06b15-e752-fdb3-1aaf-55594affe8c1, 'name': SearchDatastore_Task, 'duration_secs': 0.012606} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.477261] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.481185] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 4ec5d3a2-8b29-4074-b323-f94704043b8b/4ec5d3a2-8b29-4074-b323-f94704043b8b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 930.483197] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-efc0fbe8-0344-405d-9dfb-7f5ad631ebf8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.495035] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 930.495035] env[63538]: value = "task-5101254" [ 930.495035] env[63538]: _type = "Task" [ 930.495035] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.508058] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101254, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.538137] env[63538]: INFO nova.compute.manager [-] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Took 1.43 seconds to deallocate network for instance. [ 930.664152] env[63538]: DEBUG nova.network.neutron [req-10dad7a4-2a89-441c-8fd3-17c25a8d0fd0 req-8af0f902-3f85-4522-87cb-da736bcd4f26 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Updated VIF entry in instance network info cache for port 312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b. 
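[editor's note] The CopyVirtualDisk entries above show the datastore layout in play: the image is cached once per image UUID under devstack-image-cache_base, then copied into a per-instance folder. The string helpers below only illustrate that "[datastore] folder/file.vmdk" convention with paths taken from the log; Nova itself builds these with its own datastore-path objects rather than bare strings.

    def cache_vmdk_path(datastore, image_id):
        return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
            datastore, image_id, image_id)

    def instance_vmdk_path(datastore, instance_uuid):
        return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    src = cache_vmdk_path("datastore1", "faabbca4-e27b-433a-b93d-f059fd73bc92")
    dst = instance_vmdk_path("datastore1", "4ec5d3a2-8b29-4074-b323-f94704043b8b")
    print(src)  # matches the source path in the CopyVirtualDisk_Task entry above
    print(dst)  # matches the destination path
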
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 930.664152] env[63538]: DEBUG nova.network.neutron [req-10dad7a4-2a89-441c-8fd3-17c25a8d0fd0 req-8af0f902-3f85-4522-87cb-da736bcd4f26 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Updating instance_info_cache with network_info: [{"id": "312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "address": "fa:16:3e:0b:f9:6b", "network": {"id": "268f8bc3-cc14-4aad-aa44-87cd6e506681", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-818769698-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b78ab12ed0254cf4b3ccb7c231ca810d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap312c7ab1-93", "ovs_interfaceid": "312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.718502] env[63538]: DEBUG nova.network.neutron [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Successfully created port: 674bcf37-4948-4ce1-8f18-5ba7912f2544 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 930.802724] env[63538]: DEBUG oslo_vmware.api [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Task: {'id': task-5101253, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303407} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.805928] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 930.806194] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 930.806390] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 930.806572] env[63538]: INFO nova.compute.manager [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Took 1.45 seconds to destroy the instance on the hypervisor. [ 930.806843] env[63538]: DEBUG oslo.service.loopingcall [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 930.807413] env[63538]: DEBUG nova.compute.manager [-] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 930.807529] env[63538]: DEBUG nova.network.neutron [-] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 930.830260] env[63538]: DEBUG oslo_vmware.api [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101251, 'name': PowerOnVM_Task, 'duration_secs': 0.713599} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.830403] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 930.830490] env[63538]: INFO nova.compute.manager [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Took 8.87 seconds to spawn the instance on the hypervisor. 
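[editor's note] The repeated "Waiting for the task ... progress is N% ... completed successfully" entries (wait_for_task / _poll_task in oslo_vmware/api.py) are the task-wait pattern around every vCenter call in this log. A minimal sketch follows, assuming `session` is an already-established oslo_vmware.api.VMwareAPISession and `vm_ref` a resolved VM managed-object reference; session setup and the vm_ref lookup are out of scope here.

    def power_on(session, vm_ref):
        """Invoke PowerOnVM_Task and block until vCenter reports completion."""
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # wait_for_task polls the server-side task state (the _poll_task /
        # "progress is N%" lines in this log) and raises if the task errors out.
        return session.wait_for_task(task)
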
[ 930.830670] env[63538]: DEBUG nova.compute.manager [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 930.831542] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862de9db-244b-4e0f-a096-61fb64fb5e00 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.950040] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4655355f-455a-4e05-9a18-804a59d00ded {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.957418] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ffd1a8-1788-48d7-ac5b-b74b9ffd4c8e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.002302] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8beb5b3-ff46-422e-a583-8664d4dd4fae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.018502] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101254, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.020684] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7461ce2b-e5ce-4db9-bdce-1368bf800aa4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.042893] env[63538]: DEBUG nova.compute.provider_tree [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.049082] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.166025] env[63538]: DEBUG oslo_concurrency.lockutils [req-10dad7a4-2a89-441c-8fd3-17c25a8d0fd0 req-8af0f902-3f85-4522-87cb-da736bcd4f26 service nova] Releasing lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.166380] env[63538]: DEBUG nova.compute.manager [req-10dad7a4-2a89-441c-8fd3-17c25a8d0fd0 req-8af0f902-3f85-4522-87cb-da736bcd4f26 service nova] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Received event network-vif-deleted-8d80ee33-5e67-4651-a9b1-1f58ca92fb2e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 931.166626] env[63538]: INFO 
nova.compute.manager [req-10dad7a4-2a89-441c-8fd3-17c25a8d0fd0 req-8af0f902-3f85-4522-87cb-da736bcd4f26 service nova] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Neutron deleted interface 8d80ee33-5e67-4651-a9b1-1f58ca92fb2e; detaching it from the instance and deleting it from the info cache [ 931.166893] env[63538]: DEBUG nova.network.neutron [req-10dad7a4-2a89-441c-8fd3-17c25a8d0fd0 req-8af0f902-3f85-4522-87cb-da736bcd4f26 service nova] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.168513] env[63538]: DEBUG oslo_concurrency.lockutils [req-f8d35794-11eb-4a65-a33a-c7232038280d req-8de724c8-e226-46bb-8f16-90fbc7bb28f3 service nova] Acquired lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.168729] env[63538]: DEBUG nova.network.neutron [req-f8d35794-11eb-4a65-a33a-c7232038280d req-8de724c8-e226-46bb-8f16-90fbc7bb28f3 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Refreshing network info cache for port 312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 931.356854] env[63538]: INFO nova.compute.manager [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Took 43.92 seconds to build instance. [ 931.443202] env[63538]: DEBUG nova.compute.manager [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 931.471676] env[63538]: DEBUG nova.virt.hardware [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 931.471915] env[63538]: DEBUG nova.virt.hardware [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 931.472090] env[63538]: DEBUG nova.virt.hardware [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 931.472280] env[63538]: DEBUG nova.virt.hardware [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 931.472430] env[63538]: DEBUG nova.virt.hardware [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 931.472615] env[63538]: DEBUG nova.virt.hardware [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 931.472782] env[63538]: DEBUG nova.virt.hardware [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 931.473009] env[63538]: DEBUG nova.virt.hardware [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 931.473208] env[63538]: DEBUG nova.virt.hardware [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] 
Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 931.473379] env[63538]: DEBUG nova.virt.hardware [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 931.473556] env[63538]: DEBUG nova.virt.hardware [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 931.476810] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f0e757-6f83-4bc6-9472-2667d00ae004 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.484884] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51228b6a-3a65-458d-94b6-904b7e7a2a11 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.513038] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101254, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.832746} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.516038] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 4ec5d3a2-8b29-4074-b323-f94704043b8b/4ec5d3a2-8b29-4074-b323-f94704043b8b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 931.516038] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 931.516038] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6fb868c3-955b-4175-8dba-7f5dba8dae39 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.521655] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 931.521655] env[63538]: value = "task-5101255" [ 931.521655] env[63538]: _type = "Task" [ 931.521655] env[63538]: } to complete. 
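[editor's note] The nova.virt.hardware entries above enumerate CPU topologies for the flavor: all (sockets, cores, threads) triples whose product equals the vCPU count, within the logged maxima of 65536 each. The sketch below reproduces that enumeration in simplified form; it is not Nova's implementation.

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product is vcpus."""
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                yield (sockets, cores, threads)

    # For the 1-vCPU m1.nano flavor in this log there is a single possibility,
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(list(possible_topologies(1)))   # -> [(1, 1, 1)]
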
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.532054] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101255, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.546286] env[63538]: DEBUG nova.scheduler.client.report [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 931.673376] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4378399c-9b17-4f1a-80b2-7e7f88f2a0e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.688909] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb001be3-3e7e-436e-800c-7c50d4712bd1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.723837] env[63538]: DEBUG nova.compute.manager [req-10dad7a4-2a89-441c-8fd3-17c25a8d0fd0 req-8af0f902-3f85-4522-87cb-da736bcd4f26 service nova] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Detach interface failed, port_id=8d80ee33-5e67-4651-a9b1-1f58ca92fb2e, reason: Instance c8a02fa6-5232-4dde-b6dd-0da1089b6bbf could not be found. 
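[editor's note] The scheduler report-client entries above carry the placement inventory payload for provider f65218a4-1d3d-476a-9093-01cae92c8635. The sketch below restates that payload (numbers copied from the log) and derives the oversubscribed capacity as (total - reserved) * allocation_ratio, which is how placement is generally described as treating these fields; the helper itself is illustrative only.

    INVENTORY = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 200, "reserved": 0, "min_unit": 1, "max_unit": 94,
                    "step_size": 1, "allocation_ratio": 1.0},
    }

    def capacity(inv):
        """Consumable capacity per resource class, after reservation and ratio."""
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inv.items()}

    print(capacity(INVENTORY))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 200.0}
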
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 931.797297] env[63538]: DEBUG nova.compute.manager [req-cb9a3cfa-d583-46e3-b993-253f3e0488a7 req-ad61808c-21e0-4e2c-8531-03ad4001e844 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Received event network-vif-deleted-077e00c6-2e89-440c-8653-8742862e8000 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 931.797560] env[63538]: INFO nova.compute.manager [req-cb9a3cfa-d583-46e3-b993-253f3e0488a7 req-ad61808c-21e0-4e2c-8531-03ad4001e844 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Neutron deleted interface 077e00c6-2e89-440c-8653-8742862e8000; detaching it from the instance and deleting it from the info cache [ 931.797760] env[63538]: DEBUG nova.network.neutron [req-cb9a3cfa-d583-46e3-b993-253f3e0488a7 req-ad61808c-21e0-4e2c-8531-03ad4001e844 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Updating instance_info_cache with network_info: [{"id": "d5a8d000-6995-4d04-b7ff-431a2456a13e", "address": "fa:16:3e:f8:6e:97", "network": {"id": "fbcfee83-91c7-4538-bbd4-d5c6159d6aa1", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1159869692", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6906dcd3e0074931bdbe4233fbc2bf95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5a8d000-69", "ovs_interfaceid": "d5a8d000-6995-4d04-b7ff-431a2456a13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.863576] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3be93a07-0d75-4fdc-bcb8-f0fab9b2c0db tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "f5d92749-04d6-4935-8dc6-afb692222df0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.649s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.003167] env[63538]: DEBUG nova.network.neutron [req-f8d35794-11eb-4a65-a33a-c7232038280d req-8de724c8-e226-46bb-8f16-90fbc7bb28f3 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Updated VIF entry in instance network info cache for port 312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 932.003526] env[63538]: DEBUG nova.network.neutron [req-f8d35794-11eb-4a65-a33a-c7232038280d req-8de724c8-e226-46bb-8f16-90fbc7bb28f3 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Updating instance_info_cache with network_info: [{"id": "312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "address": "fa:16:3e:0b:f9:6b", "network": {"id": "268f8bc3-cc14-4aad-aa44-87cd6e506681", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-818769698-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b78ab12ed0254cf4b3ccb7c231ca810d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap312c7ab1-93", "ovs_interfaceid": "312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.036028] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101255, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.32069} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.036028] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 932.036028] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1f51f6-faf8-4acf-9cc8-eecf608a7017 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.051925] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.640s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.052507] env[63538]: DEBUG nova.compute.manager [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 932.064073] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 4ec5d3a2-8b29-4074-b323-f94704043b8b/4ec5d3a2-8b29-4074-b323-f94704043b8b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.064767] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.059s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.065065] env[63538]: DEBUG nova.objects.instance [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lazy-loading 'resources' on Instance uuid edc670dd-732a-4c54-924c-c99ee539d4d9 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.066634] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1be9e5d-65c9-444a-be6a-12a3ae13a7d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.092546] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 932.092546] env[63538]: value = "task-5101256" [ 932.092546] env[63538]: _type = "Task" [ 932.092546] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.103156] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101256, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.123499] env[63538]: DEBUG nova.compute.manager [req-f1a3198a-c428-4f57-a290-64a4fa86aa07 req-77c6b0cb-714c-42f7-9eb1-aa0eb496bd44 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Received event network-changed-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 932.123791] env[63538]: DEBUG nova.compute.manager [req-f1a3198a-c428-4f57-a290-64a4fa86aa07 req-77c6b0cb-714c-42f7-9eb1-aa0eb496bd44 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Refreshing instance network info cache due to event network-changed-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 932.124048] env[63538]: DEBUG oslo_concurrency.lockutils [req-f1a3198a-c428-4f57-a290-64a4fa86aa07 req-77c6b0cb-714c-42f7-9eb1-aa0eb496bd44 service nova] Acquiring lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.149980] env[63538]: DEBUG nova.network.neutron [-] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.301834] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7b17598-3e96-47f1-ae4c-b111a8c50fde {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.315863] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bec3aef-d983-433a-a4f9-3f6a175d22f2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.366554] env[63538]: DEBUG nova.compute.manager [req-cb9a3cfa-d583-46e3-b993-253f3e0488a7 req-ad61808c-21e0-4e2c-8531-03ad4001e844 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Detach interface failed, port_id=077e00c6-2e89-440c-8653-8742862e8000, reason: Instance b47925eb-3d97-415b-9410-2e325da5ce79 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 932.366554] env[63538]: DEBUG nova.compute.manager [req-cb9a3cfa-d583-46e3-b993-253f3e0488a7 req-ad61808c-21e0-4e2c-8531-03ad4001e844 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Received event network-vif-deleted-d5a8d000-6995-4d04-b7ff-431a2456a13e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 932.367029] env[63538]: INFO nova.compute.manager [req-cb9a3cfa-d583-46e3-b993-253f3e0488a7 req-ad61808c-21e0-4e2c-8531-03ad4001e844 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Neutron deleted interface d5a8d000-6995-4d04-b7ff-431a2456a13e; detaching it from the instance and deleting it from the info cache [ 932.367371] env[63538]: DEBUG nova.network.neutron [req-cb9a3cfa-d583-46e3-b993-253f3e0488a7 req-ad61808c-21e0-4e2c-8531-03ad4001e844 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.508363] env[63538]: DEBUG oslo_concurrency.lockutils [req-f8d35794-11eb-4a65-a33a-c7232038280d req-8de724c8-e226-46bb-8f16-90fbc7bb28f3 service nova] Releasing lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.508363] env[63538]: DEBUG oslo_concurrency.lockutils [req-f1a3198a-c428-4f57-a290-64a4fa86aa07 req-77c6b0cb-714c-42f7-9eb1-aa0eb496bd44 service nova] Acquired lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.508363] env[63538]: DEBUG nova.network.neutron [req-f1a3198a-c428-4f57-a290-64a4fa86aa07 req-77c6b0cb-714c-42f7-9eb1-aa0eb496bd44 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Refreshing network info 
cache for port 312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 932.568356] env[63538]: DEBUG nova.compute.utils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 932.568821] env[63538]: DEBUG nova.compute.manager [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 932.568989] env[63538]: DEBUG nova.network.neutron [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 932.608910] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101256, 'name': ReconfigVM_Task, 'duration_secs': 0.402753} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.609245] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 4ec5d3a2-8b29-4074-b323-f94704043b8b/4ec5d3a2-8b29-4074-b323-f94704043b8b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.610188] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75e18adc-8251-4612-900b-2516f969f896 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.619286] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 932.619286] env[63538]: value = "task-5101257" [ 932.619286] env[63538]: _type = "Task" [ 932.619286] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.629587] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101257, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.655673] env[63538]: INFO nova.compute.manager [-] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Took 1.85 seconds to deallocate network for instance. 
[ 932.669158] env[63538]: DEBUG nova.policy [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21f98d3c77454d809c0aa1ca5c7dc6d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '492427e54e1048f292dab2abdac71af5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 932.870800] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d823b801-7d4e-432f-9199-f5cf5e3585dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.880862] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39df8ab4-172e-4599-8bbb-6bb37661d9ae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.928695] env[63538]: DEBUG nova.compute.manager [req-cb9a3cfa-d583-46e3-b993-253f3e0488a7 req-ad61808c-21e0-4e2c-8531-03ad4001e844 service nova] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Detach interface failed, port_id=d5a8d000-6995-4d04-b7ff-431a2456a13e, reason: Instance b47925eb-3d97-415b-9410-2e325da5ce79 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 933.075305] env[63538]: DEBUG nova.compute.manager [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 933.095382] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9aa0c9-b8b8-4eaf-84fb-aff6f6296961 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.109756] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84855ae6-187a-4ed5-81c8-f9033bc892fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.154842] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23a611f-615c-4f41-969b-d5d22bde15e7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.162272] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101257, 'name': Rename_Task, 'duration_secs': 0.159836} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.163249] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.163944] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 933.164519] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fac1918e-b0e8-43a2-bc87-8f90e4e5c974 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.170041] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff586829-8498-4425-8eec-592c107a05da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.175870] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 933.175870] env[63538]: value = "task-5101258" [ 933.175870] env[63538]: _type = "Task" [ 933.175870] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.189372] env[63538]: DEBUG nova.compute.provider_tree [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.197973] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101258, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.277758] env[63538]: DEBUG nova.compute.manager [req-5bdb2f5a-e8d8-488a-980c-9da58f33c3f6 req-912a52e1-d84e-44b6-bcfa-c59f60150c16 service nova] [instance: 90e56075-0d77-467f-90be-913315b63b33] Received event network-vif-plugged-674bcf37-4948-4ce1-8f18-5ba7912f2544 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 933.278727] env[63538]: DEBUG oslo_concurrency.lockutils [req-5bdb2f5a-e8d8-488a-980c-9da58f33c3f6 req-912a52e1-d84e-44b6-bcfa-c59f60150c16 service nova] Acquiring lock "90e56075-0d77-467f-90be-913315b63b33-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.278727] env[63538]: DEBUG oslo_concurrency.lockutils [req-5bdb2f5a-e8d8-488a-980c-9da58f33c3f6 req-912a52e1-d84e-44b6-bcfa-c59f60150c16 service nova] Lock "90e56075-0d77-467f-90be-913315b63b33-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.278727] env[63538]: DEBUG oslo_concurrency.lockutils [req-5bdb2f5a-e8d8-488a-980c-9da58f33c3f6 req-912a52e1-d84e-44b6-bcfa-c59f60150c16 service nova] Lock "90e56075-0d77-467f-90be-913315b63b33-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.278727] env[63538]: DEBUG nova.compute.manager [req-5bdb2f5a-e8d8-488a-980c-9da58f33c3f6 req-912a52e1-d84e-44b6-bcfa-c59f60150c16 service nova] [instance: 90e56075-0d77-467f-90be-913315b63b33] No waiting events found dispatching network-vif-plugged-674bcf37-4948-4ce1-8f18-5ba7912f2544 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 933.278727] env[63538]: WARNING nova.compute.manager [req-5bdb2f5a-e8d8-488a-980c-9da58f33c3f6 req-912a52e1-d84e-44b6-bcfa-c59f60150c16 service nova] [instance: 90e56075-0d77-467f-90be-913315b63b33] Received unexpected event network-vif-plugged-674bcf37-4948-4ce1-8f18-5ba7912f2544 for instance with vm_state building and task_state spawning. [ 933.363544] env[63538]: DEBUG nova.network.neutron [req-f1a3198a-c428-4f57-a290-64a4fa86aa07 req-77c6b0cb-714c-42f7-9eb1-aa0eb496bd44 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Updated VIF entry in instance network info cache for port 312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 933.366176] env[63538]: DEBUG nova.network.neutron [req-f1a3198a-c428-4f57-a290-64a4fa86aa07 req-77c6b0cb-714c-42f7-9eb1-aa0eb496bd44 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Updating instance_info_cache with network_info: [{"id": "312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "address": "fa:16:3e:0b:f9:6b", "network": {"id": "268f8bc3-cc14-4aad-aa44-87cd6e506681", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-818769698-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b78ab12ed0254cf4b3ccb7c231ca810d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap312c7ab1-93", "ovs_interfaceid": "312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.450532] env[63538]: DEBUG nova.network.neutron [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Successfully updated port: 674bcf37-4948-4ce1-8f18-5ba7912f2544 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 933.538627] env[63538]: DEBUG nova.network.neutron [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Successfully created port: 5a3ccff0-6550-429c-a4ce-0afa4c25230f {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 933.628470] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "cf72ac3d-4051-428a-b5bc-7f28accb13c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.628756] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "cf72ac3d-4051-428a-b5bc-7f28accb13c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.687412] env[63538]: DEBUG oslo_vmware.api [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101258, 'name': PowerOnVM_Task, 'duration_secs': 0.499781} completed 
successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.687728] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 933.688322] env[63538]: INFO nova.compute.manager [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Took 9.16 seconds to spawn the instance on the hypervisor. [ 933.688322] env[63538]: DEBUG nova.compute.manager [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 933.688958] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6ce4b6-6a94-447a-a00b-8716487bbc96 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.699648] env[63538]: DEBUG nova.scheduler.client.report [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 933.870214] env[63538]: DEBUG oslo_concurrency.lockutils [req-f1a3198a-c428-4f57-a290-64a4fa86aa07 req-77c6b0cb-714c-42f7-9eb1-aa0eb496bd44 service nova] Releasing lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.956206] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "refresh_cache-90e56075-0d77-467f-90be-913315b63b33" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.956206] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "refresh_cache-90e56075-0d77-467f-90be-913315b63b33" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.956206] env[63538]: DEBUG nova.network.neutron [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Building network info cache for instance {{(pid=63538) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 934.042876] env[63538]: DEBUG nova.compute.manager [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] [instance: 90e56075-0d77-467f-90be-913315b63b33] Received event network-changed-674bcf37-4948-4ce1-8f18-5ba7912f2544 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 934.042876] env[63538]: DEBUG nova.compute.manager [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] [instance: 90e56075-0d77-467f-90be-913315b63b33] Refreshing instance network info cache due to event network-changed-674bcf37-4948-4ce1-8f18-5ba7912f2544. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 934.043029] env[63538]: DEBUG oslo_concurrency.lockutils [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] Acquiring lock "refresh_cache-90e56075-0d77-467f-90be-913315b63b33" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.091277] env[63538]: DEBUG nova.compute.manager [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 934.133296] env[63538]: DEBUG nova.virt.hardware [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 934.133697] env[63538]: DEBUG nova.virt.hardware [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 934.135315] env[63538]: DEBUG nova.virt.hardware [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.135858] env[63538]: DEBUG nova.virt.hardware [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 934.136267] env[63538]: DEBUG nova.virt.hardware [None 
req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 934.136489] env[63538]: DEBUG nova.virt.hardware [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 934.136733] env[63538]: DEBUG nova.virt.hardware [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 934.136904] env[63538]: DEBUG nova.virt.hardware [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 934.137101] env[63538]: DEBUG nova.virt.hardware [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 934.137276] env[63538]: DEBUG nova.virt.hardware [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 934.137518] env[63538]: DEBUG nova.virt.hardware [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 934.137949] env[63538]: DEBUG nova.compute.manager [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 934.142080] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50664d36-cd76-4f3b-852d-1a2e6ca99529 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.152235] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9137a6f1-9299-41de-a6f8-59e4fa67542c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.209979] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.143s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.212127] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.110s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.213798] env[63538]: INFO nova.compute.claims [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 934.217078] env[63538]: INFO nova.compute.manager [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Took 44.74 seconds to build instance. [ 934.241715] env[63538]: INFO nova.scheduler.client.report [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Deleted allocations for instance edc670dd-732a-4c54-924c-c99ee539d4d9 [ 934.502954] env[63538]: DEBUG nova.network.neutron [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 934.664819] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.719359] env[63538]: DEBUG nova.network.neutron [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Updating instance_info_cache with network_info: [{"id": "674bcf37-4948-4ce1-8f18-5ba7912f2544", "address": "fa:16:3e:c3:72:33", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap674bcf37-49", "ovs_interfaceid": "674bcf37-4948-4ce1-8f18-5ba7912f2544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.728095] env[63538]: DEBUG oslo_concurrency.lockutils [None req-42f7e61d-4a77-4c8a-9551-4ee816476822 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "4ec5d3a2-8b29-4074-b323-f94704043b8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.317s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.749579] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6576774e-7088-4942-a93e-4086d4a0b6d1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "edc670dd-732a-4c54-924c-c99ee539d4d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.811s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.178230] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquiring lock "e447c109-4cef-4cc7-9acf-61abc0f47482" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.178230] env[63538]: DEBUG oslo_concurrency.lockutils [None 
req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Lock "e447c109-4cef-4cc7-9acf-61abc0f47482" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.178230] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquiring lock "e447c109-4cef-4cc7-9acf-61abc0f47482-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.178230] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Lock "e447c109-4cef-4cc7-9acf-61abc0f47482-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.178230] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Lock "e447c109-4cef-4cc7-9acf-61abc0f47482-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.180897] env[63538]: INFO nova.compute.manager [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Terminating instance [ 935.183100] env[63538]: DEBUG nova.compute.manager [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 935.183499] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 935.184474] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40963487-676b-4258-bd23-357b12a5fa72 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.194110] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 935.195878] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6edc061c-1680-4a69-a9ba-fd88494220f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.204583] env[63538]: DEBUG oslo_vmware.api [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 935.204583] env[63538]: value = "task-5101259" [ 935.204583] env[63538]: _type = "Task" [ 935.204583] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.208025] env[63538]: DEBUG oslo_vmware.rw_handles [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52861c08-772a-46af-1e24-33fee86d127d/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 935.208025] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71230c49-c84f-4a37-ac6e-67b88a7a97ed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.221641] env[63538]: DEBUG oslo_vmware.api [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101259, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.223789] env[63538]: DEBUG oslo_vmware.rw_handles [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52861c08-772a-46af-1e24-33fee86d127d/disk-0.vmdk is in state: ready. 
{{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 935.224375] env[63538]: ERROR oslo_vmware.rw_handles [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52861c08-772a-46af-1e24-33fee86d127d/disk-0.vmdk due to incomplete transfer. [ 935.224854] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-428599c2-a9df-4be0-bc27-ac5c55f3545a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.230185] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "refresh_cache-90e56075-0d77-467f-90be-913315b63b33" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.231412] env[63538]: DEBUG nova.compute.manager [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Instance network_info: |[{"id": "674bcf37-4948-4ce1-8f18-5ba7912f2544", "address": "fa:16:3e:c3:72:33", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap674bcf37-49", "ovs_interfaceid": "674bcf37-4948-4ce1-8f18-5ba7912f2544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 935.231412] env[63538]: DEBUG oslo_concurrency.lockutils [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] Acquired lock "refresh_cache-90e56075-0d77-467f-90be-913315b63b33" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.232111] env[63538]: DEBUG nova.network.neutron [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] [instance: 90e56075-0d77-467f-90be-913315b63b33] Refreshing network info cache for port 674bcf37-4948-4ce1-8f18-5ba7912f2544 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 935.234115] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:72:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f972c061-0cd5-4aed-8cfb-42cc4a08835a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '674bcf37-4948-4ce1-8f18-5ba7912f2544', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 935.245168] env[63538]: DEBUG oslo.service.loopingcall [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 935.249932] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90e56075-0d77-467f-90be-913315b63b33] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 935.251201] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7358f4ce-b35a-430f-84c3-befc72522c6e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.273409] env[63538]: DEBUG oslo_vmware.rw_handles [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52861c08-772a-46af-1e24-33fee86d127d/disk-0.vmdk. {{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 935.273712] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Uploaded image a24c0f28-e6a9-4f20-ac43-e49948a73214 to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 935.276488] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 935.277438] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ac64641b-9d48-42e0-acb5-1d47317547d4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.283097] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 935.283097] env[63538]: value = "task-5101260" [ 935.283097] env[63538]: _type = "Task" [ 935.283097] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.289326] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 935.289326] env[63538]: value = "task-5101261" [ 935.289326] env[63538]: _type = "Task" [ 935.289326] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.301603] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101260, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.309233] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101261, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.616051] env[63538]: DEBUG nova.network.neutron [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Successfully updated port: 5a3ccff0-6550-429c-a4ce-0afa4c25230f {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 935.725489] env[63538]: DEBUG oslo_vmware.api [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101259, 'name': PowerOffVM_Task, 'duration_secs': 0.265057} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.725811] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 935.726033] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 935.726364] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71f8513f-839a-4e74-9cfe-f84d2c7ee07a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.733028] env[63538]: DEBUG nova.network.neutron [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] [instance: 90e56075-0d77-467f-90be-913315b63b33] Updated VIF entry in instance network info cache for port 674bcf37-4948-4ce1-8f18-5ba7912f2544. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 935.733028] env[63538]: DEBUG nova.network.neutron [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] [instance: 90e56075-0d77-467f-90be-913315b63b33] Updating instance_info_cache with network_info: [{"id": "674bcf37-4948-4ce1-8f18-5ba7912f2544", "address": "fa:16:3e:c3:72:33", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap674bcf37-49", "ovs_interfaceid": "674bcf37-4948-4ce1-8f18-5ba7912f2544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.794974] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101260, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.802437] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 935.802871] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 935.802871] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Deleting the datastore file [datastore2] e447c109-4cef-4cc7-9acf-61abc0f47482 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 935.803180] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c6847b58-e6e9-47ab-bf35-996530a29c16 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.809053] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101261, 'name': Destroy_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.814573] env[63538]: DEBUG oslo_vmware.api [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for the task: (returnval){ [ 935.814573] env[63538]: value = "task-5101263" [ 935.814573] env[63538]: _type = "Task" [ 935.814573] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.823913] env[63538]: DEBUG oslo_vmware.api [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101263, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.826788] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bb79f6-6689-4b46-a3d9-f5554a7c35f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.836655] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db29f36-514a-45d0-8d24-e3302c6af921 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.869593] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e068f140-afec-4e59-80c1-e0e862af9258 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.877788] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4decf5b6-4e37-445a-8944-02047e7426f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.893902] env[63538]: DEBUG nova.compute.provider_tree [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.118933] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "refresh_cache-2e97b357-0200-4aed-9705-dd7808f853ba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.119127] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "refresh_cache-2e97b357-0200-4aed-9705-dd7808f853ba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.119290] env[63538]: DEBUG nova.network.neutron [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 
936.221666] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "6bc30d96-8056-421c-875b-c24488e5f595" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.221952] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "6bc30d96-8056-421c-875b-c24488e5f595" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.235223] env[63538]: DEBUG oslo_concurrency.lockutils [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] Releasing lock "refresh_cache-90e56075-0d77-467f-90be-913315b63b33" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.235624] env[63538]: DEBUG nova.compute.manager [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Received event network-changed-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 936.235793] env[63538]: DEBUG nova.compute.manager [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Refreshing instance network info cache due to event network-changed-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 936.236097] env[63538]: DEBUG oslo_concurrency.lockutils [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] Acquiring lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.236336] env[63538]: DEBUG oslo_concurrency.lockutils [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] Acquired lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.236530] env[63538]: DEBUG nova.network.neutron [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Refreshing network info cache for port 312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 936.240166] env[63538]: DEBUG nova.compute.manager [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Received event network-vif-plugged-5a3ccff0-6550-429c-a4ce-0afa4c25230f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 936.240479] env[63538]: DEBUG oslo_concurrency.lockutils [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] Acquiring lock "2e97b357-0200-4aed-9705-dd7808f853ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.240639] env[63538]: DEBUG oslo_concurrency.lockutils [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] Lock "2e97b357-0200-4aed-9705-dd7808f853ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.240777] env[63538]: DEBUG oslo_concurrency.lockutils [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] Lock "2e97b357-0200-4aed-9705-dd7808f853ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.240934] env[63538]: DEBUG nova.compute.manager [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] No waiting events found dispatching network-vif-plugged-5a3ccff0-6550-429c-a4ce-0afa4c25230f {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 936.241149] env[63538]: WARNING nova.compute.manager [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Received unexpected event network-vif-plugged-5a3ccff0-6550-429c-a4ce-0afa4c25230f for instance with vm_state building and task_state spawning. 
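The "Acquiring lock" / "acquired by" / '"released" by' entries above come from oslo.concurrency's lockutils, which Nova uses to serialize work such as the "refresh_cache-<instance uuid>" and "<instance uuid>-events" locks; the {{... lockutils.py:310/313/331 ...}} source references in these lines point at that module. A minimal sketch of the named-lock pattern, assuming oslo.concurrency is installed; the lock name mirrors the log, while refresh_instance_cache and refresh_fn are illustrative placeholders rather than Nova's actual code:

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid, refresh_fn):
        # lockutils.lock() is a context manager around a named in-process
        # lock; acquisition and release are logged at DEBUG, which is where
        # the lockutils.py lines in this log originate.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_fn(instance_uuid)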
[ 936.241333] env[63538]: DEBUG nova.compute.manager [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Received event network-changed-5a3ccff0-6550-429c-a4ce-0afa4c25230f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 936.241489] env[63538]: DEBUG nova.compute.manager [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Refreshing instance network info cache due to event network-changed-5a3ccff0-6550-429c-a4ce-0afa4c25230f. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 936.241671] env[63538]: DEBUG oslo_concurrency.lockutils [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] Acquiring lock "refresh_cache-2e97b357-0200-4aed-9705-dd7808f853ba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.295877] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101260, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.307552] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101261, 'name': Destroy_Task, 'duration_secs': 0.526488} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.307920] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Destroyed the VM [ 936.308194] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 936.309072] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-53c24104-a6ff-4604-be2c-c5299133ee8f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.319935] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 936.319935] env[63538]: value = "task-5101264" [ 936.319935] env[63538]: _type = "Task" [ 936.319935] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.334181] env[63538]: DEBUG oslo_vmware.api [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Task: {'id': task-5101263, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199833} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.338393] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.338640] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 936.338826] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 936.339110] env[63538]: INFO nova.compute.manager [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Took 1.16 seconds to destroy the instance on the hypervisor. [ 936.339336] env[63538]: DEBUG oslo.service.loopingcall [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 936.339952] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101264, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.340454] env[63538]: DEBUG nova.compute.manager [-] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 936.340618] env[63538]: DEBUG nova.network.neutron [-] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 936.397601] env[63538]: DEBUG nova.scheduler.client.report [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 936.652401] env[63538]: DEBUG nova.network.neutron [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 936.725060] env[63538]: DEBUG nova.compute.manager [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 936.776695] env[63538]: INFO nova.network.neutron [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Port 312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 936.776988] env[63538]: DEBUG nova.network.neutron [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.799441] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101260, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.830789] env[63538]: DEBUG nova.compute.manager [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Stashing vm_state: active {{(pid=63538) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 936.840176] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101264, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.863395] env[63538]: DEBUG nova.network.neutron [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Updating instance_info_cache with network_info: [{"id": "5a3ccff0-6550-429c-a4ce-0afa4c25230f", "address": "fa:16:3e:eb:33:f1", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a3ccff0-65", "ovs_interfaceid": "5a3ccff0-6550-429c-a4ce-0afa4c25230f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.904579] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.692s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.905541] env[63538]: DEBUG nova.compute.manager [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 936.909609] env[63538]: DEBUG oslo_concurrency.lockutils [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.188s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.909974] env[63538]: DEBUG nova.objects.instance [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Lazy-loading 'resources' on Instance uuid b0b4ae9c-95d3-47a1-86a7-120c88b60704 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 937.071377] env[63538]: DEBUG nova.network.neutron [-] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.257469] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.279503] env[63538]: DEBUG oslo_concurrency.lockutils [req-d2fcb300-4464-4c4d-a32d-8019da6947d0 req-a2500dc0-e6a0-4dc7-a40f-2e2463673e63 service nova] Releasing lock "refresh_cache-e447c109-4cef-4cc7-9acf-61abc0f47482" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.302548] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101260, 'name': CreateVM_Task, 'duration_secs': 1.518345} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.302865] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90e56075-0d77-467f-90be-913315b63b33] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 937.303991] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.304303] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.304791] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 937.305189] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60f71a19-131f-4c2e-9eba-95faddaaec29 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.312100] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 937.312100] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a79d2b-a1e0-e94e-55bc-9e9b139393cf" [ 937.312100] env[63538]: _type = "Task" [ 937.312100] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.325823] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a79d2b-a1e0-e94e-55bc-9e9b139393cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.342497] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101264, 'name': RemoveSnapshot_Task} progress is 16%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.353091] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.367425] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "refresh_cache-2e97b357-0200-4aed-9705-dd7808f853ba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.368013] env[63538]: DEBUG nova.compute.manager [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Instance network_info: |[{"id": "5a3ccff0-6550-429c-a4ce-0afa4c25230f", "address": "fa:16:3e:eb:33:f1", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a3ccff0-65", "ovs_interfaceid": "5a3ccff0-6550-429c-a4ce-0afa4c25230f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 937.368290] env[63538]: DEBUG oslo_concurrency.lockutils [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] Acquired lock "refresh_cache-2e97b357-0200-4aed-9705-dd7808f853ba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.368533] env[63538]: DEBUG nova.network.neutron [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Refreshing network info cache for port 5a3ccff0-6550-429c-a4ce-0afa4c25230f {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 937.369966] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:33:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a3ccff0-6550-429c-a4ce-0afa4c25230f', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 937.379105] env[63538]: DEBUG oslo.service.loopingcall [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 937.382288] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 937.383287] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7469516e-b470-4820-9291-89f0c6f7ec57 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.406579] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 937.406579] env[63538]: value = "task-5101265" [ 937.406579] env[63538]: _type = "Task" [ 937.406579] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.417160] env[63538]: DEBUG nova.compute.utils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 937.424085] env[63538]: DEBUG nova.compute.manager [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 937.424308] env[63538]: DEBUG nova.network.neutron [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 937.509463] env[63538]: DEBUG nova.policy [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd6de144ccc4498aa90ae01ca7a0f6f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d6954a5254f441ca256c85330297cef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 937.575991] env[63538]: INFO nova.compute.manager [-] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Took 1.23 seconds to deallocate network for instance. 
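The CreateVM_Task / Destroy_Task / RemoveSnapshot_Task entries above follow oslo.vmware's task pattern: the driver invokes an asynchronous vSphere *_Task method, then waits on it, which produces the recurring "_poll_task ... progress is N%" lines until the task reaches a terminal state and its duration_secs is logged. A minimal sketch of that flow, assuming an already established oslo_vmware.api.VMwareAPISession named session; folder_ref, config_spec, res_pool_ref and host_ref are hypothetical managed-object references and spec built elsewhere, and this is not Nova's own vm_util code:

    def create_vm(session, folder_ref, config_spec, res_pool_ref, host_ref):
        # Start the asynchronous CreateVM_Task on the vCenter/ESX side.
        task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=res_pool_ref,
                                  host=host_ref)
        # wait_for_task() polls the task object at the session's poll
        # interval and raises if the task errors out; on success it returns
        # the final task info, whose result is the new VM reference.
        task_info = session.wait_for_task(task)
        return task_info.result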
[ 937.744187] env[63538]: DEBUG nova.network.neutron [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Updated VIF entry in instance network info cache for port 5a3ccff0-6550-429c-a4ce-0afa4c25230f. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 937.744187] env[63538]: DEBUG nova.network.neutron [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Updating instance_info_cache with network_info: [{"id": "5a3ccff0-6550-429c-a4ce-0afa4c25230f", "address": "fa:16:3e:eb:33:f1", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a3ccff0-65", "ovs_interfaceid": "5a3ccff0-6550-429c-a4ce-0afa4c25230f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.824454] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a79d2b-a1e0-e94e-55bc-9e9b139393cf, 'name': SearchDatastore_Task, 'duration_secs': 0.011863} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.825907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.826200] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 937.826460] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.827448] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.827448] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 937.827954] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4eb743a-c285-41cc-9798-d660e04c9171 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.830554] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbbaf876-052a-463d-aebc-10359417115e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.843207] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40091ccd-880f-4ef8-897b-8a99f079b2bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.847268] env[63538]: DEBUG oslo_vmware.api [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101264, 'name': RemoveSnapshot_Task, 'duration_secs': 1.201227} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.848057] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 937.848323] env[63538]: INFO nova.compute.manager [None req-59b6bae7-15a5-42f2-afce-7415762936c5 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Took 17.05 seconds to snapshot the instance on the hypervisor. [ 937.850762] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 937.850939] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 937.853181] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2467f87-5b97-4d2b-a677-95d93a1755d4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.884080] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713ddd24-069f-48ef-a2ca-a2642f1ec6b8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.886992] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 937.886992] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52db01e5-ba52-247d-8acb-287c9924df1a" [ 937.886992] env[63538]: _type = "Task" [ 937.886992] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.895081] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4327747-ef19-4cf2-986d-867843a0a246 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.903383] env[63538]: DEBUG nova.network.neutron [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Successfully created port: 3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 937.905712] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52db01e5-ba52-247d-8acb-287c9924df1a, 'name': SearchDatastore_Task, 'duration_secs': 0.010126} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.907184] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9cf9032-6721-4f7b-8912-c43d6a59a6bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.923165] env[63538]: DEBUG nova.compute.provider_tree [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 937.924552] env[63538]: DEBUG nova.compute.manager [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 937.930930] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 937.930930] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5227b465-0ec7-69b5-f312-e8ecc6bb1b8d" [ 937.930930] env[63538]: _type = "Task" [ 937.930930] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.940388] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101265, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.950507] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5227b465-0ec7-69b5-f312-e8ecc6bb1b8d, 'name': SearchDatastore_Task, 'duration_secs': 0.017648} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.950856] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.951146] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 90e56075-0d77-467f-90be-913315b63b33/90e56075-0d77-467f-90be-913315b63b33.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 937.951652] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9edbad9-830f-459e-b668-0184af154b12 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.961202] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 937.961202] env[63538]: value = "task-5101266" [ 937.961202] env[63538]: _type = "Task" [ 937.961202] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.972387] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101266, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.083526] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.248189] env[63538]: DEBUG oslo_concurrency.lockutils [req-0df998b6-8a50-49d8-9a11-cbacc91ec277 req-d7af0223-28bd-40ff-a321-a25e60a4a9f9 service nova] Releasing lock "refresh_cache-2e97b357-0200-4aed-9705-dd7808f853ba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.302280] env[63538]: DEBUG nova.compute.manager [req-2c44263d-da4c-43e2-9349-ef1b6e3d0828 req-00c0cde1-fe5a-4d31-af8c-3c73c458dab0 service nova] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Received event network-vif-deleted-312c7ab1-93a3-4ce7-b0d3-e01d2c48db0b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 938.423925] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101265, 'name': CreateVM_Task, 'duration_secs': 0.657844} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.424154] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 938.424903] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.425126] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.425492] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 938.425781] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2dcdda41-d589-40c2-820d-0cc1530fc18a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.437688] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 938.437688] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52410529-1f1c-90f9-c114-f694eff046e3" [ 938.437688] env[63538]: _type = "Task" [ 938.437688] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.452994] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52410529-1f1c-90f9-c114-f694eff046e3, 'name': SearchDatastore_Task, 'duration_secs': 0.01494} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.452994] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.452994] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 938.452994] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.452994] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.454367] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 938.456218] env[63538]: ERROR nova.scheduler.client.report [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] [req-4adc3a54-5298-48ca-8ca7-8e099537e58e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4adc3a54-5298-48ca-8ca7-8e099537e58e"}]} [ 938.456788] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de2bf96a-3767-4064-951f-0e520d8bd67d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.471389] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 938.471841] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 938.476139] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf7f6126-8730-472b-82d6-8052874ccd40 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.479199] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101266, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498176} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.480029] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 90e56075-0d77-467f-90be-913315b63b33/90e56075-0d77-467f-90be-913315b63b33.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 938.483328] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 938.483328] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3afa3f3f-1b28-4f5c-8930-91fb9e7fe14c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.485155] env[63538]: DEBUG nova.scheduler.client.report [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 938.489466] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 938.489466] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e2cc6b-cea0-a3f0-c67a-4b75f8140e63" [ 938.489466] env[63538]: _type = "Task" [ 938.489466] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.497869] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 938.497869] env[63538]: value = "task-5101267" [ 938.497869] env[63538]: _type = "Task" [ 938.497869] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.510189] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e2cc6b-cea0-a3f0-c67a-4b75f8140e63, 'name': SearchDatastore_Task, 'duration_secs': 0.011508} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.512389] env[63538]: DEBUG nova.scheduler.client.report [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 938.512672] env[63538]: DEBUG nova.compute.provider_tree [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 938.516204] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cac49476-9f47-4f15-8e9a-5b4f6f5525e5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.522089] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101267, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.526253] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 938.526253] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b07b34-8e73-8427-b5d8-fd3c71b7deb0" [ 938.526253] env[63538]: _type = "Task" [ 938.526253] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.539582] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b07b34-8e73-8427-b5d8-fd3c71b7deb0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.540757] env[63538]: DEBUG nova.scheduler.client.report [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 938.564393] env[63538]: DEBUG nova.scheduler.client.report [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 938.938371] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19a9038-b771-4c63-9578-5ab9ed9c0b55 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.947182] env[63538]: DEBUG nova.compute.manager [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 938.950673] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab983e62-375f-4781-8ebe-a05025e4bdcf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.985323] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c89af6-fd4a-40c4-ba57-25acd517e8dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.995125] env[63538]: DEBUG nova.virt.hardware [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 938.995418] env[63538]: DEBUG nova.virt.hardware [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor limits 0:0:0 {{(pid=63538) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 938.995584] env[63538]: DEBUG nova.virt.hardware [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.995776] env[63538]: DEBUG nova.virt.hardware [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 938.995930] env[63538]: DEBUG nova.virt.hardware [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.996107] env[63538]: DEBUG nova.virt.hardware [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 938.996343] env[63538]: DEBUG nova.virt.hardware [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 938.996526] env[63538]: DEBUG nova.virt.hardware [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 938.996673] env[63538]: DEBUG nova.virt.hardware [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 938.996841] env[63538]: DEBUG nova.virt.hardware [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 938.997038] env[63538]: DEBUG nova.virt.hardware [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 938.999562] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dea50b1-d99c-483d-857a-14e1ad4c49a2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.005816] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5956ab-6a94-4f9d-9e35-e0af24dafd4e {{(pid=63538) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.023213] env[63538]: DEBUG nova.compute.provider_tree [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 939.033490] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101267, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078035} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.033858] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb72464-6463-4116-8d91-83d4ac3159eb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.041705] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 939.042207] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5326f73e-5500-4041-a301-b0cb08b2a9cf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.076507] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 90e56075-0d77-467f-90be-913315b63b33/90e56075-0d77-467f-90be-913315b63b33.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 939.081313] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2660609b-b4cf-40b5-b306-1e302acb91b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.096261] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b07b34-8e73-8427-b5d8-fd3c71b7deb0, 'name': SearchDatastore_Task, 'duration_secs': 0.01128} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.097017] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.097300] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 2e97b357-0200-4aed-9705-dd7808f853ba/2e97b357-0200-4aed-9705-dd7808f853ba.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 939.098058] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1ffa437-be56-4d48-a9ce-ef2df5eef604 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.103977] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 939.103977] env[63538]: value = "task-5101268" [ 939.103977] env[63538]: _type = "Task" [ 939.103977] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.109179] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 939.109179] env[63538]: value = "task-5101269" [ 939.109179] env[63538]: _type = "Task" [ 939.109179] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.117672] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101268, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.124560] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101269, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.582178] env[63538]: DEBUG nova.scheduler.client.report [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 107 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 939.582493] env[63538]: DEBUG nova.compute.provider_tree [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 107 to 108 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 939.582685] env[63538]: DEBUG nova.compute.provider_tree [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 939.621915] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101268, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.630973] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101269, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.688891] env[63538]: DEBUG nova.network.neutron [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Successfully updated port: 3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 940.089045] env[63538]: DEBUG oslo_concurrency.lockutils [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.179s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.092170] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.959s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.093838] env[63538]: INFO nova.compute.claims [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 940.117845] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101268, 'name': ReconfigVM_Task, 'duration_secs': 0.656915} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.118605] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 90e56075-0d77-467f-90be-913315b63b33/90e56075-0d77-467f-90be-913315b63b33.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 940.120800] env[63538]: INFO nova.scheduler.client.report [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Deleted allocations for instance b0b4ae9c-95d3-47a1-86a7-120c88b60704 [ 940.121407] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05ed9158-fa24-4f5d-b384-795366e90987 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.126976] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101269, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.787175} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.131039] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 2e97b357-0200-4aed-9705-dd7808f853ba/2e97b357-0200-4aed-9705-dd7808f853ba.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 940.131039] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 940.131039] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d5fc1329-f6ef-4dab-80ce-3bf57fa47d34 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.136061] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 940.136061] env[63538]: value = "task-5101270" [ 940.136061] env[63538]: _type = "Task" [ 940.136061] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.141295] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 940.141295] env[63538]: value = "task-5101271" [ 940.141295] env[63538]: _type = "Task" [ 940.141295] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.150997] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101270, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.157236] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101271, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.191326] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-8ed0bd15-71fc-435e-9e4a-90b023ad8a79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.193906] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-8ed0bd15-71fc-435e-9e4a-90b023ad8a79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.193906] env[63538]: DEBUG nova.network.neutron [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 940.357243] env[63538]: DEBUG nova.compute.manager [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Received event network-vif-plugged-3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 940.357486] env[63538]: DEBUG oslo_concurrency.lockutils [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] Acquiring lock "8ed0bd15-71fc-435e-9e4a-90b023ad8a79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.357623] env[63538]: DEBUG oslo_concurrency.lockutils [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] Lock "8ed0bd15-71fc-435e-9e4a-90b023ad8a79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.357801] env[63538]: DEBUG oslo_concurrency.lockutils [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] Lock "8ed0bd15-71fc-435e-9e4a-90b023ad8a79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.357987] env[63538]: DEBUG nova.compute.manager [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] No waiting events found dispatching network-vif-plugged-3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 940.358175] env[63538]: WARNING nova.compute.manager [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Received unexpected event network-vif-plugged-3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46 for instance with vm_state building and task_state spawning. 
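The WARNING just above ("Received unexpected event network-vif-plugged-3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46 for instance with vm_state building and task_state spawning") is the usually benign case where Neutron reports the VIF as plugged before anything has registered a waiter for that event: nova-compute matches incoming external events against per-instance waiters under the "<uuid>-events" lock seen in these entries, and simply logs the ones nothing was waiting for. Below is a minimal, illustrative sketch of that wait/dispatch pattern; it is not Nova's actual code, and the names EventRegistry, prepare and dispatch are invented for the example.

# Hedged sketch (not Nova's implementation) of the "waiting events" pattern the
# WARNING above refers to: incoming external events are matched against a
# per-instance registry of waiters; events with no registered waiter are logged
# as unexpected and dropped.
import threading
from collections import defaultdict


class EventRegistry:
    def __init__(self):
        self._lock = threading.Lock()       # plays the role of the "<uuid>-events" lock
        self._waiters = defaultdict(dict)   # instance_uuid -> {event_name: threading.Event}

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before starting the operation that triggers it."""
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        """Called when the external service (e.g. Neutron) reports the event."""
        with self._lock:
            ev = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if ev is None:
            # No waiter registered: this is the "unexpected event" case in the log.
            print(f"WARNING: unexpected event {event_name} for instance {instance_uuid}")
            return False
        ev.set()
        return True


if __name__ == "__main__":
    reg = EventRegistry()
    # Event arrives before anyone waited for it -> warning, as in the log above.
    reg.dispatch("8ed0bd15-71fc-435e-9e4a-90b023ad8a79",
                 "network-vif-plugged-3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46")
    # Normal path: register first, then the dispatch wakes the waiter.
    waiter = reg.prepare("8ed0bd15-71fc-435e-9e4a-90b023ad8a79",
                         "network-vif-plugged-3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46")
    reg.dispatch("8ed0bd15-71fc-435e-9e4a-90b023ad8a79",
                 "network-vif-plugged-3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46")
    assert waiter.is_set()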
[ 940.358348] env[63538]: DEBUG nova.compute.manager [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Received event network-changed-3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 940.358506] env[63538]: DEBUG nova.compute.manager [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Refreshing instance network info cache due to event network-changed-3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 940.358723] env[63538]: DEBUG oslo_concurrency.lockutils [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] Acquiring lock "refresh_cache-8ed0bd15-71fc-435e-9e4a-90b023ad8a79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.446452] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "d5d557c6-3d4e-4122-8756-218c9757fa01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.446749] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.446998] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "d5d557c6-3d4e-4122-8756-218c9757fa01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.447250] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.447503] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.450032] env[63538]: INFO nova.compute.manager [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 
tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Terminating instance [ 940.452090] env[63538]: DEBUG nova.compute.manager [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 940.452292] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 940.453955] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1da23a-3ef5-4c32-ac7a-d23a03b697c4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.461333] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 940.461637] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4500e3b9-932c-471a-8109-cc4715c53d60 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.469815] env[63538]: DEBUG oslo_vmware.api [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 940.469815] env[63538]: value = "task-5101272" [ 940.469815] env[63538]: _type = "Task" [ 940.469815] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.479710] env[63538]: DEBUG oslo_vmware.api [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101272, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.634516] env[63538]: DEBUG oslo_concurrency.lockutils [None req-95bb7408-2a32-4b81-b651-ebdffe579c3f tempest-InstanceActionsTestJSON-835482681 tempest-InstanceActionsTestJSON-835482681-project-member] Lock "b0b4ae9c-95d3-47a1-86a7-120c88b60704" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.951s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.648749] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101270, 'name': Rename_Task, 'duration_secs': 0.452191} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.649574] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 940.649877] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2fffc8b2-8592-4e60-a8bf-a9150bb2b8c3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.655506] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101271, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.190386} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.656237] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 940.657150] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749a1716-39b7-44e2-b545-46a8a6ffac7f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.662567] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 940.662567] env[63538]: value = "task-5101273" [ 940.662567] env[63538]: _type = "Task" [ 940.662567] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.685437] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 2e97b357-0200-4aed-9705-dd7808f853ba/2e97b357-0200-4aed-9705-dd7808f853ba.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 940.686812] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b859f088-2be1-49a5-991f-b45d1e9ec2dd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.707160] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101273, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.714121] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 940.714121] env[63538]: value = "task-5101274" [ 940.714121] env[63538]: _type = "Task" [ 940.714121] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.723770] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101274, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.738883] env[63538]: DEBUG nova.network.neutron [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 940.773653] env[63538]: DEBUG oslo_concurrency.lockutils [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.774022] env[63538]: DEBUG oslo_concurrency.lockutils [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.775040] env[63538]: DEBUG oslo_concurrency.lockutils [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.775040] env[63538]: DEBUG oslo_concurrency.lockutils [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.775040] env[63538]: DEBUG oslo_concurrency.lockutils [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.777017] env[63538]: INFO nova.compute.manager [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Terminating instance [ 940.779561] env[63538]: DEBUG nova.compute.manager [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 940.779778] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 940.780665] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58d2e5f-39f2-461a-9bde-81e0d5bc0620 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.792365] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 940.795805] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36616278-acf0-4041-800f-b11e033fe30e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.804486] env[63538]: DEBUG oslo_vmware.api [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 940.804486] env[63538]: value = "task-5101275" [ 940.804486] env[63538]: _type = "Task" [ 940.804486] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.816200] env[63538]: DEBUG oslo_vmware.api [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101275, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.982400] env[63538]: DEBUG oslo_vmware.api [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101272, 'name': PowerOffVM_Task, 'duration_secs': 0.381728} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.982870] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 940.983113] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 940.983466] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5fdb3552-fa6a-44f5-9c57-c2c98e3368af {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.993979] env[63538]: DEBUG nova.network.neutron [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Updating instance_info_cache with network_info: [{"id": "3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46", "address": "fa:16:3e:63:ce:d8", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c0dc74b-3e", "ovs_interfaceid": "3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.084210] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 941.084618] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 941.084618] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 
tempest-AttachVolumeTestJSON-1982835647-project-member] Deleting the datastore file [datastore2] d5d557c6-3d4e-4122-8756-218c9757fa01 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.084923] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3fd8244-1989-4ecf-9827-19f6750d9c83 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.096859] env[63538]: DEBUG oslo_vmware.api [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 941.096859] env[63538]: value = "task-5101277" [ 941.096859] env[63538]: _type = "Task" [ 941.096859] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.108636] env[63538]: DEBUG oslo_vmware.api [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101277, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.174285] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101273, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.227359] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101274, 'name': ReconfigVM_Task, 'duration_secs': 0.351639} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.227664] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 2e97b357-0200-4aed-9705-dd7808f853ba/2e97b357-0200-4aed-9705-dd7808f853ba.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 941.234763] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba8a6446-d2a3-4e81-83c4-e03bcf1b66c9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.242704] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 941.242704] env[63538]: value = "task-5101278" [ 941.242704] env[63538]: _type = "Task" [ 941.242704] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.252195] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101278, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.319966] env[63538]: DEBUG oslo_vmware.api [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101275, 'name': PowerOffVM_Task, 'duration_secs': 0.229192} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.320423] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 941.320718] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 941.321117] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-585a62c3-abf8-4713-8dec-5ddd104db328 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.396695] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 941.396942] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 941.397152] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Deleting the datastore file [datastore1] 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.397479] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bdf10e3d-f33b-4942-b92c-68d98585913b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.411659] env[63538]: DEBUG oslo_vmware.api [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 941.411659] env[63538]: value = "task-5101280" [ 941.411659] env[63538]: _type = "Task" [ 941.411659] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.422515] env[63538]: DEBUG oslo_vmware.api [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101280, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.501027] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-8ed0bd15-71fc-435e-9e4a-90b023ad8a79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.501027] env[63538]: DEBUG nova.compute.manager [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Instance network_info: |[{"id": "3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46", "address": "fa:16:3e:63:ce:d8", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c0dc74b-3e", "ovs_interfaceid": "3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 941.501027] env[63538]: DEBUG oslo_concurrency.lockutils [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] Acquired lock "refresh_cache-8ed0bd15-71fc-435e-9e4a-90b023ad8a79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.501027] env[63538]: DEBUG nova.network.neutron [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Refreshing network info cache for port 3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 941.501027] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:ce:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f39e3b37-7906-4bbc-820e-ceac74e4d827', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 941.515307] env[63538]: DEBUG oslo.service.loopingcall [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.519224] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 941.519926] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ad94a95-38a1-430d-ad1f-b1ad1194162a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.546164] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 941.546164] env[63538]: value = "task-5101281" [ 941.546164] env[63538]: _type = "Task" [ 941.546164] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.558837] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101281, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.584045] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8ea9b8-3f0c-4376-943e-250c37bde216 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.593061] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d198d47-c9d1-4a0e-989c-c1326299dc9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.608609] env[63538]: DEBUG oslo_vmware.api [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101277, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172098} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.638502] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.638502] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 941.638912] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 941.639106] env[63538]: INFO nova.compute.manager [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Took 1.19 seconds to destroy the instance on the hypervisor. [ 941.639428] env[63538]: DEBUG oslo.service.loopingcall [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.639951] env[63538]: DEBUG nova.compute.manager [-] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 941.640061] env[63538]: DEBUG nova.network.neutron [-] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 941.642635] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97694682-3434-4af9-90f4-8e13c150bc67 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.652865] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc63e90f-5bf7-401d-b483-d87bf0fae8c5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.669145] env[63538]: DEBUG nova.compute.provider_tree [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.681163] env[63538]: DEBUG oslo_vmware.api [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101273, 'name': PowerOnVM_Task, 'duration_secs': 0.804998} completed successfully.
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.682336] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 941.682555] env[63538]: INFO nova.compute.manager [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Took 10.24 seconds to spawn the instance on the hypervisor. [ 941.682738] env[63538]: DEBUG nova.compute.manager [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 941.683664] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e14e0ee-8f4a-47e9-85c4-9dbfcd31389e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.753855] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101278, 'name': Rename_Task, 'duration_secs': 0.168959} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.754161] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 941.754430] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a4704a4-f6a8-4c46-9d08-23f158914d58 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.762193] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 941.762193] env[63538]: value = "task-5101282" [ 941.762193] env[63538]: _type = "Task" [ 941.762193] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.773615] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101282, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.923130] env[63538]: DEBUG oslo_vmware.api [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101280, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242697} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.923683] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.924038] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 941.924340] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 941.924640] env[63538]: INFO nova.compute.manager [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 941.924993] env[63538]: DEBUG oslo.service.loopingcall [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.925385] env[63538]: DEBUG nova.compute.manager [-] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 941.925600] env[63538]: DEBUG nova.network.neutron [-] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 942.061364] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101281, 'name': CreateVM_Task, 'duration_secs': 0.367451} completed successfully.
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.061831] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 942.062915] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.063298] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.066251] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 942.066251] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97d77851-d06d-4d6a-b052-e025b112aeda {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.070980] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 942.070980] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c67f7d-cca2-2a62-f90f-5e599f9372bf" [ 942.070980] env[63538]: _type = "Task" [ 942.070980] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.080883] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c67f7d-cca2-2a62-f90f-5e599f9372bf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.176900] env[63538]: DEBUG nova.scheduler.client.report [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 942.210900] env[63538]: INFO nova.compute.manager [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Took 40.54 seconds to build instance. [ 942.273245] env[63538]: DEBUG oslo_vmware.api [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101282, 'name': PowerOnVM_Task, 'duration_secs': 0.505229} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.276345] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 942.276637] env[63538]: INFO nova.compute.manager [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Took 8.19 seconds to spawn the instance on the hypervisor. [ 942.277104] env[63538]: DEBUG nova.compute.manager [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 942.278045] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2db0a95-53d6-4feb-a3f2-4b015e12b13d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.472609] env[63538]: DEBUG nova.network.neutron [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Updated VIF entry in instance network info cache for port 3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 942.473030] env[63538]: DEBUG nova.network.neutron [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Updating instance_info_cache with network_info: [{"id": "3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46", "address": "fa:16:3e:63:ce:d8", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c0dc74b-3e", "ovs_interfaceid": "3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.588017] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c67f7d-cca2-2a62-f90f-5e599f9372bf, 'name': SearchDatastore_Task, 'duration_secs': 0.013011} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.588017] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.588017] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.588017] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.588017] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.588017] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.588017] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60c625cb-04e5-47a5-93b1-03f9a33b0381 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.603708] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.603708] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 942.604589] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e51790b6-ae98-462f-9845-e84157865dd1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.611488] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 942.611488] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f46a05-ed0b-d2be-be28-77b1b074e19e" [ 942.611488] env[63538]: _type = "Task" [ 942.611488] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.621617] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f46a05-ed0b-d2be-be28-77b1b074e19e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.681561] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.590s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.684497] env[63538]: DEBUG nova.compute.manager [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 942.690802] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.237s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.693585] env[63538]: DEBUG nova.objects.instance [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lazy-loading 'resources' on Instance uuid 1db1d558-2473-49cb-b309-f7192bd6b9c1 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.715240] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fa5a29c-d58b-4720-b648-4880d466a722 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "90e56075-0d77-467f-90be-913315b63b33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 51.761s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.767819] env[63538]: DEBUG nova.compute.manager [req-86bbe3db-ec7b-455d-9593-e3d7d3613858 req-c0ad5ec2-799b-4bfa-b41a-621cc37d1c59 service nova] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Received event network-vif-deleted-68ccd913-2820-44c7-b00f-73f8c61e610e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 942.768701] env[63538]: INFO nova.compute.manager [req-86bbe3db-ec7b-455d-9593-e3d7d3613858 req-c0ad5ec2-799b-4bfa-b41a-621cc37d1c59 service nova] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Neutron deleted interface 68ccd913-2820-44c7-b00f-73f8c61e610e; detaching it from the instance and deleting it from the info cache [ 942.769789] env[63538]: DEBUG nova.network.neutron [req-86bbe3db-ec7b-455d-9593-e3d7d3613858 req-c0ad5ec2-799b-4bfa-b41a-621cc37d1c59 service nova] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.798015] env[63538]: INFO nova.compute.manager [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Took 38.94 seconds to build instance.
[ 942.977049] env[63538]: DEBUG oslo_concurrency.lockutils [req-5908da3f-7ba7-4c41-8f10-3b779a144475 req-4107baac-e50d-4cd0-8109-25d0099e5152 service nova] Releasing lock "refresh_cache-8ed0bd15-71fc-435e-9e4a-90b023ad8a79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.032805] env[63538]: DEBUG nova.network.neutron [-] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.058451] env[63538]: DEBUG nova.network.neutron [-] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.127091] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f46a05-ed0b-d2be-be28-77b1b074e19e, 'name': SearchDatastore_Task, 'duration_secs': 0.010893} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.128195] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eb8a199-2463-404f-a943-fc0763620146 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.140423] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 943.140423] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5234b23b-6bb7-737e-423b-3e898c1682c0" [ 943.140423] env[63538]: _type = "Task" [ 943.140423] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.151794] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5234b23b-6bb7-737e-423b-3e898c1682c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.193952] env[63538]: DEBUG nova.compute.utils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 943.195530] env[63538]: DEBUG nova.compute.manager [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 943.195737] env[63538]: DEBUG nova.network.neutron [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 943.273654] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a51c0d7-bec1-4442-b50f-716598f5fc90 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.287407] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1769d2fb-2089-4ca1-8140-72c4ba16c536 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.302652] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1fb0fd71-1f6d-459e-ae64-322a92b2e5d2 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "2e97b357-0200-4aed-9705-dd7808f853ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 47.922s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.304011] env[63538]: DEBUG nova.policy [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a11495c611974f26aaa6117bfda80179', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3f6e933bf6c4e71af3b2a1e02d6e42f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 943.336804] env[63538]: DEBUG nova.compute.manager [req-86bbe3db-ec7b-455d-9593-e3d7d3613858 req-c0ad5ec2-799b-4bfa-b41a-621cc37d1c59 service nova] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Detach interface failed, port_id=68ccd913-2820-44c7-b00f-73f8c61e610e, reason: Instance 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 943.539578] env[63538]: INFO nova.compute.manager [-] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Took 1.61 seconds to deallocate network for instance. [ 943.561430] env[63538]: INFO nova.compute.manager [-] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Took 1.92 seconds to deallocate network for instance. [ 943.658900] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5234b23b-6bb7-737e-423b-3e898c1682c0, 'name': SearchDatastore_Task, 'duration_secs': 0.011457} completed successfully.
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.659268] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.659554] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 8ed0bd15-71fc-435e-9e4a-90b023ad8a79/8ed0bd15-71fc-435e-9e4a-90b023ad8a79.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 943.659920] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94da66a1-dafa-406b-b7ca-d0910a319620 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.670294] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 943.670294] env[63538]: value = "task-5101283" [ 943.670294] env[63538]: _type = "Task" [ 943.670294] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.681235] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101283, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.701701] env[63538]: DEBUG nova.compute.manager [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 943.716339] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c63b06-7754-458e-89a6-4c651bc54a32 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.728455] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6dfc98-e8d5-4fc1-950f-2fb2a7a31738 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.763766] env[63538]: DEBUG nova.network.neutron [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Successfully created port: da4e2c60-afe9-437a-ba0d-55b9b358ae8f {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 943.768863] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612dfd92-4fa9-4edd-a281-f130a745e5b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.778949] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70bfb5ed-dcf5-4acf-8202-46f2a0599a31 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.794732] env[63538]: DEBUG nova.compute.provider_tree [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.886987] env[63538]: DEBUG nova.compute.manager [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 943.888061] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bd0c7f-2944-44db-ac22-be914189462a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.891559] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "2e97b357-0200-4aed-9705-dd7808f853ba" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.891795] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "2e97b357-0200-4aed-9705-dd7808f853ba" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.891966] env[63538]: INFO nova.compute.manager [None req-dcf33280-7b12-42a1-84be-beafc6e19643 
tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Shelving [ 944.051537] env[63538]: DEBUG oslo_concurrency.lockutils [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.072967] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.185138] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101283, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.297576] env[63538]: DEBUG nova.scheduler.client.report [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 944.402631] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 944.402945] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9ea372b-3e5d-41ca-b4a5-96291e1eec95 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.407050] env[63538]: INFO nova.compute.manager [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] instance snapshotting [ 944.408751] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3d53fa-de7b-40c3-b097-a6b5bf832c10 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.413586] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 944.413586] env[63538]: value = "task-5101284" [ 944.413586] env[63538]: _type = 
"Task" [ 944.413586] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.438243] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76c62ed-b4e9-48eb-aa97-effa836a0dba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.441890] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101284, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.683035] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101283, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.567055} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.683997] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 8ed0bd15-71fc-435e-9e4a-90b023ad8a79/8ed0bd15-71fc-435e-9e4a-90b023ad8a79.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 944.683997] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 944.684291] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9599245c-cb10-4bd0-be27-28b9ac41255d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.691967] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 944.691967] env[63538]: value = "task-5101285" [ 944.691967] env[63538]: _type = "Task" [ 944.691967] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.701206] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101285, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.711652] env[63538]: DEBUG nova.compute.manager [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 944.736720] env[63538]: DEBUG nova.virt.hardware [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 944.737241] env[63538]: DEBUG nova.virt.hardware [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 944.737241] env[63538]: DEBUG nova.virt.hardware [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 944.737446] env[63538]: DEBUG nova.virt.hardware [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 944.737626] env[63538]: DEBUG nova.virt.hardware [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 944.737781] env[63538]: DEBUG nova.virt.hardware [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 944.738045] env[63538]: DEBUG nova.virt.hardware [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 944.738170] env[63538]: DEBUG nova.virt.hardware [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 944.738346] env[63538]: DEBUG nova.virt.hardware [None 
req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 944.738570] env[63538]: DEBUG nova.virt.hardware [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 944.738763] env[63538]: DEBUG nova.virt.hardware [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 944.739737] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50cc8063-f4d7-407c-b288-300de782e8b3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.748895] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2dc872-f795-48e7-ab7c-b75a868c2173 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.799385] env[63538]: DEBUG nova.compute.manager [req-1cdf8e8a-1429-4a0c-8bb2-809ceb25e30b req-22f01fbd-a829-4592-b0bf-681dea27dcbf service nova] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Received event network-vif-deleted-0d48de93-8e4f-4795-a582-f00e76e60047 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 944.802794] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.112s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.805798] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.103s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.806072] env[63538]: DEBUG nova.objects.instance [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lazy-loading 'resources' on Instance uuid fa8ed101-914d-4751-ab9b-f68ad5da7a56 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.824572] env[63538]: INFO nova.scheduler.client.report [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Deleted allocations for instance 1db1d558-2473-49cb-b309-f7192bd6b9c1 [ 944.925325] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: 
{'id': task-5101284, 'name': PowerOffVM_Task, 'duration_secs': 0.352248} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.925611] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 944.926464] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff15e5b-e859-4bd9-9df1-74dd75b2dc83 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.948199] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32daa20c-2ed7-479c-a9ca-6c4a819810b3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.951823] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 944.952927] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-25b551f0-9dfe-4699-80a1-7965df110dd7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.962754] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 944.962754] env[63538]: value = "task-5101286" [ 944.962754] env[63538]: _type = "Task" [ 944.962754] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.971768] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101286, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.202057] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101285, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.199511} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.202448] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 945.203284] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1489ae-1056-406a-87ab-26f078ad1c42 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.226745] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 8ed0bd15-71fc-435e-9e4a-90b023ad8a79/8ed0bd15-71fc-435e-9e4a-90b023ad8a79.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 945.228085] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db5da7b5-11b4-4464-a8fa-8a2d5a901924 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.249349] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 945.249349] env[63538]: value = "task-5101287" [ 945.249349] env[63538]: _type = "Task" [ 945.249349] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.259760] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101287, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.338901] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e8227ae0-1ff0-4bc9-bab3-d9750cbea698 tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "1db1d558-2473-49cb-b309-f7192bd6b9c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.952s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.464957] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 945.465507] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1e381371-12de-4e25-9a76-5e2f635902f0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.485152] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101286, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.487471] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 945.487471] env[63538]: value = "task-5101288" [ 945.487471] env[63538]: _type = "Task" [ 945.487471] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.501201] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101288, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.526595] env[63538]: DEBUG nova.network.neutron [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Successfully updated port: da4e2c60-afe9-437a-ba0d-55b9b358ae8f {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.731561] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ced7fba-5485-460c-b8bb-597e710ef2e2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.740510] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c761cd3d-39d9-49fe-96c5-bd246474f1bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.777988] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089f578a-737e-4dae-baee-053bda863fe6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.786338] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101287, 'name': ReconfigVM_Task, 'duration_secs': 0.474305} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.788644] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 8ed0bd15-71fc-435e-9e4a-90b023ad8a79/8ed0bd15-71fc-435e-9e4a-90b023ad8a79.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 945.789362] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5cc07c0f-988b-43d6-9c3f-ab993f304347 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.794019] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f962f38-aaf4-43ff-8060-7bbdddd4f984 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.808092] env[63538]: DEBUG nova.compute.provider_tree [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.810744] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 945.810744] env[63538]: value = "task-5101289" [ 945.810744] env[63538]: _type = "Task" [ 945.810744] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.982561] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101286, 'name': CreateSnapshot_Task, 'duration_secs': 0.869439} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.983034] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 945.984297] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeffdf09-e122-4a3f-aa13-a667243f4aa7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.013231] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101288, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.030528] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.030671] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquired lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.030796] env[63538]: DEBUG nova.network.neutron [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 946.313166] env[63538]: DEBUG nova.scheduler.client.report [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 946.327556] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 
tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101289, 'name': Rename_Task, 'duration_secs': 0.202918} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.327700] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 946.327875] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e53b385-5b3e-4a0e-b835-81b6ffd03fc3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.336153] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 946.336153] env[63538]: value = "task-5101290" [ 946.336153] env[63538]: _type = "Task" [ 946.336153] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.345168] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101290, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.513564] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 946.513907] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101288, 'name': CreateSnapshot_Task, 'duration_secs': 0.644717} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.514555] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0d2fabe2-ee9c-4a0d-b0cb-02fffcd68e60 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.517162] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 946.517924] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce273ebe-7398-4159-9cb9-9158608eddd2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.530510] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 946.530510] env[63538]: value = "task-5101291" [ 946.530510] env[63538]: _type = "Task" [ 946.530510] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.541679] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101291, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.570101] env[63538]: DEBUG nova.network.neutron [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 946.745834] env[63538]: DEBUG nova.network.neutron [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Updating instance_info_cache with network_info: [{"id": "da4e2c60-afe9-437a-ba0d-55b9b358ae8f", "address": "fa:16:3e:8a:cb:bc", "network": {"id": "8497b0d5-d275-4479-ae83-3ef4a1bb795b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-849919494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f6e933bf6c4e71af3b2a1e02d6e42f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4e2c60-af", "ovs_interfaceid": "da4e2c60-afe9-437a-ba0d-55b9b358ae8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.823836] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.017s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.826855] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.403s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.827140] env[63538]: DEBUG nova.objects.instance [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'resources' on Instance uuid d967631f-5c8a-42d8-ac05-4cec3bdb55cf {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.849120] env[63538]: DEBUG oslo_vmware.api [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101290, 'name': PowerOnVM_Task, 'duration_secs': 0.494785} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.849812] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 946.849812] env[63538]: INFO nova.compute.manager [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Took 7.90 seconds to spawn the instance on the hypervisor. [ 946.849812] env[63538]: DEBUG nova.compute.manager [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 946.850655] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea5d2ce-094a-4799-ab3d-c5d2acf6c33f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.863786] env[63538]: INFO nova.scheduler.client.report [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Deleted allocations for instance fa8ed101-914d-4751-ab9b-f68ad5da7a56 [ 947.023351] env[63538]: DEBUG nova.compute.manager [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Received event network-vif-plugged-da4e2c60-afe9-437a-ba0d-55b9b358ae8f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 947.023588] env[63538]: DEBUG oslo_concurrency.lockutils [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] Acquiring lock "8097cb1c-bbba-45a8-be81-64d38decb1df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.024165] env[63538]: DEBUG oslo_concurrency.lockutils [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] Lock "8097cb1c-bbba-45a8-be81-64d38decb1df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.024165] env[63538]: DEBUG oslo_concurrency.lockutils [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] Lock "8097cb1c-bbba-45a8-be81-64d38decb1df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.024165] env[63538]: DEBUG nova.compute.manager [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] No waiting events found dispatching network-vif-plugged-da4e2c60-afe9-437a-ba0d-55b9b358ae8f {{(pid=63538) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 947.024443] env[63538]: WARNING nova.compute.manager [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Received unexpected event network-vif-plugged-da4e2c60-afe9-437a-ba0d-55b9b358ae8f for instance with vm_state building and task_state spawning. [ 947.024504] env[63538]: DEBUG nova.compute.manager [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Received event network-changed-da4e2c60-afe9-437a-ba0d-55b9b358ae8f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 947.024654] env[63538]: DEBUG nova.compute.manager [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Refreshing instance network info cache due to event network-changed-da4e2c60-afe9-437a-ba0d-55b9b358ae8f. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 947.024907] env[63538]: DEBUG oslo_concurrency.lockutils [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] Acquiring lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.039129] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 947.040419] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ccb77238-0d8a-4bd0-bdda-7bb617d42e34 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.053582] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101291, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.055235] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 947.055235] env[63538]: value = "task-5101292" [ 947.055235] env[63538]: _type = "Task" [ 947.055235] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.065474] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101292, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.248316] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Releasing lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.248680] env[63538]: DEBUG nova.compute.manager [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Instance network_info: |[{"id": "da4e2c60-afe9-437a-ba0d-55b9b358ae8f", "address": "fa:16:3e:8a:cb:bc", "network": {"id": "8497b0d5-d275-4479-ae83-3ef4a1bb795b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-849919494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f6e933bf6c4e71af3b2a1e02d6e42f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4e2c60-af", "ovs_interfaceid": "da4e2c60-afe9-437a-ba0d-55b9b358ae8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 947.249019] env[63538]: DEBUG oslo_concurrency.lockutils [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] Acquired lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.249209] env[63538]: DEBUG nova.network.neutron [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Refreshing network info cache for port da4e2c60-afe9-437a-ba0d-55b9b358ae8f {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 947.250734] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:cb:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ee018eb-75be-4037-a80a-07034d4eae35', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da4e2c60-afe9-437a-ba0d-55b9b358ae8f', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 947.259480] env[63538]: DEBUG oslo.service.loopingcall [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 
tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 947.262643] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 947.263218] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e95fd4e-0085-4413-8674-00bb2e111d7e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.286769] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 947.286769] env[63538]: value = "task-5101293" [ 947.286769] env[63538]: _type = "Task" [ 947.286769] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.296225] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101293, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.380794] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4062f6d9-411c-44d2-a2a7-4658d86767ec tempest-MultipleCreateTestJSON-1148870144 tempest-MultipleCreateTestJSON-1148870144-project-member] Lock "fa8ed101-914d-4751-ab9b-f68ad5da7a56" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.689s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.381975] env[63538]: INFO nova.compute.manager [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Took 39.31 seconds to build instance. [ 947.550872] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101291, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.579081] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101292, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.718997] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "0df15328-aebd-44c5-9c78-ee05f188ad95" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.719833] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.720054] env[63538]: INFO nova.compute.manager [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Shelving [ 947.751719] env[63538]: DEBUG nova.network.neutron [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Updated VIF entry in instance network info cache for port da4e2c60-afe9-437a-ba0d-55b9b358ae8f. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 947.752245] env[63538]: DEBUG nova.network.neutron [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Updating instance_info_cache with network_info: [{"id": "da4e2c60-afe9-437a-ba0d-55b9b358ae8f", "address": "fa:16:3e:8a:cb:bc", "network": {"id": "8497b0d5-d275-4479-ae83-3ef4a1bb795b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-849919494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f6e933bf6c4e71af3b2a1e02d6e42f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4e2c60-af", "ovs_interfaceid": "da4e2c60-afe9-437a-ba0d-55b9b358ae8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.803533] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101293, 'name': CreateVM_Task, 'duration_secs': 0.503995} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.807713] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 947.808909] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.809162] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.809644] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 947.809972] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b7c8faa-dfcd-4c6f-9118-bb51a587144f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.816277] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 947.816277] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525bcc69-8094-8449-f2bf-26bc6c3fc993" [ 947.816277] env[63538]: _type = "Task" [ 947.816277] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.822020] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162070eb-147d-44bb-aabe-aa61d84c198d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.828247] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525bcc69-8094-8449-f2bf-26bc6c3fc993, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.833631] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e5269b-1ba8-49b3-9a11-af6ee9aeeae2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.866649] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7ffc18-2231-4c2c-9c28-0b4a8e73370b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.879574] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5defc84-9468-4b96-bee7-fb5baaff3dcd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.900290] env[63538]: DEBUG oslo_concurrency.lockutils [None req-676eaf53-8d96-42e2-be3c-fba8dadee554 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "8ed0bd15-71fc-435e-9e4a-90b023ad8a79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.575s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.900971] env[63538]: DEBUG nova.compute.provider_tree [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 948.050345] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101291, 'name': CloneVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.072186] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101292, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.227251] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 948.227518] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d5aee82-2fe3-4924-b694-0af67d8773bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.235246] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 948.235246] env[63538]: value = "task-5101294" [ 948.235246] env[63538]: _type = "Task" [ 948.235246] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.243638] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101294, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.255108] env[63538]: DEBUG oslo_concurrency.lockutils [req-df6e3765-d119-4c99-bc89-3731b14734a4 req-bec57a65-f0a7-457c-b743-eab2da3485b9 service nova] Releasing lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.331886] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525bcc69-8094-8449-f2bf-26bc6c3fc993, 'name': SearchDatastore_Task, 'duration_secs': 0.012137} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.332374] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.332697] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.333095] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.333308] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.333567] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 948.333887] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6b2086b-6e25-4cfe-adb2-14930a929640 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.344819] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 948.345114] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 948.345926] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1ca566e-7e26-4cad-a59e-db9918a51e5c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.353803] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 948.353803] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d05dd6-dcee-86d6-b22f-987681d94d43" [ 948.353803] env[63538]: _type = "Task" [ 948.353803] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.364108] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d05dd6-dcee-86d6-b22f-987681d94d43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.441204] env[63538]: DEBUG nova.scheduler.client.report [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 108 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 948.441517] env[63538]: DEBUG nova.compute.provider_tree [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 108 to 109 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 948.441676] env[63538]: DEBUG nova.compute.provider_tree [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 948.553330] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101291, 'name': CloneVM_Task, 'duration_secs': 
1.610336} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.553615] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Created linked-clone VM from snapshot [ 948.554557] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fcec3dc-d934-4555-8790-ea9b64f93e82 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.567100] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Uploading image b299d3c5-b48d-4f1f-a911-692e6a986d0c {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 948.575762] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101292, 'name': CloneVM_Task, 'duration_secs': 1.208801} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.576083] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Created linked-clone VM from snapshot [ 948.576915] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2773cc9-bb38-4a45-8b4a-db0cdfc185b7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.589398] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Uploading image b7292989-b20d-44bb-b54a-80b3b3bed442 {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 948.609761] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 948.609761] env[63538]: value = "vm-992450" [ 948.609761] env[63538]: _type = "VirtualMachine" [ 948.609761] env[63538]: }. 
{{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 948.609761] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7b49441f-6613-402e-b62b-5184030dfb25 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.616777] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lease: (returnval){ [ 948.616777] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5210652c-7715-7ba9-c8c3-aa9c0f0ea77e" [ 948.616777] env[63538]: _type = "HttpNfcLease" [ 948.616777] env[63538]: } obtained for exporting VM: (result){ [ 948.616777] env[63538]: value = "vm-992450" [ 948.616777] env[63538]: _type = "VirtualMachine" [ 948.616777] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 948.617057] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the lease: (returnval){ [ 948.617057] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5210652c-7715-7ba9-c8c3-aa9c0f0ea77e" [ 948.617057] env[63538]: _type = "HttpNfcLease" [ 948.617057] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 948.627466] env[63538]: DEBUG oslo_vmware.rw_handles [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 948.627466] env[63538]: value = "vm-992451" [ 948.627466] env[63538]: _type = "VirtualMachine" [ 948.627466] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 948.627890] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3d150045-a894-4be8-90cf-4819b070953c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.630891] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 948.630891] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5210652c-7715-7ba9-c8c3-aa9c0f0ea77e" [ 948.630891] env[63538]: _type = "HttpNfcLease" [ 948.630891] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 948.640269] env[63538]: DEBUG oslo_vmware.rw_handles [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lease: (returnval){ [ 948.640269] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c07db5-6a1c-1fa5-de3b-1fbd07820dd7" [ 948.640269] env[63538]: _type = "HttpNfcLease" [ 948.640269] env[63538]: } obtained for exporting VM: (result){ [ 948.640269] env[63538]: value = "vm-992451" [ 948.640269] env[63538]: _type = "VirtualMachine" [ 948.640269] env[63538]: }. 
{{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 948.640269] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the lease: (returnval){ [ 948.640269] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c07db5-6a1c-1fa5-de3b-1fbd07820dd7" [ 948.640269] env[63538]: _type = "HttpNfcLease" [ 948.640269] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 948.647199] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 948.647199] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c07db5-6a1c-1fa5-de3b-1fbd07820dd7" [ 948.647199] env[63538]: _type = "HttpNfcLease" [ 948.647199] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 948.747429] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101294, 'name': PowerOffVM_Task, 'duration_secs': 0.308624} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.747745] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 948.748650] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70cc0f1b-19eb-478e-abb4-e6b7dd9ccb8d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.777314] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7cd1fd-23b1-4769-b041-b6c6cb930835 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.867803] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d05dd6-dcee-86d6-b22f-987681d94d43, 'name': SearchDatastore_Task, 'duration_secs': 0.010938} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.868668] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bf33068-8fa2-4031-ad8e-eb6a9a3ce455 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.876836] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 948.876836] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52946724-41ee-2669-f4a2-29b1c2e78b24" [ 948.876836] env[63538]: _type = "Task" [ 948.876836] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.889765] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52946724-41ee-2669-f4a2-29b1c2e78b24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.947962] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.122s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.951536] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.678s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.951815] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.954644] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.656s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.954965] env[63538]: DEBUG nova.objects.instance [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lazy-loading 'resources' on Instance uuid f1838794-710c-4bea-9e73-f6912e1b69f5 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.992037] env[63538]: INFO nova.scheduler.client.report [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Deleted allocations for instance a2e036ae-318b-44ea-9db0-10fa3838728b [ 948.995861] env[63538]: INFO nova.scheduler.client.report [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Deleted allocations for instance d967631f-5c8a-42d8-ac05-4cec3bdb55cf [ 949.052198] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.053065] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.002s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.132156] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 949.132156] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5210652c-7715-7ba9-c8c3-aa9c0f0ea77e" [ 949.132156] env[63538]: _type = "HttpNfcLease" [ 949.132156] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 949.132500] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 949.132500] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5210652c-7715-7ba9-c8c3-aa9c0f0ea77e" [ 949.132500] env[63538]: _type = "HttpNfcLease" [ 949.132500] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 949.133462] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2931471f-94fa-4472-a5c6-941759f9265c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.147457] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521707f7-33ee-5ba6-1604-520a3825891d/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 949.147709] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521707f7-33ee-5ba6-1604-520a3825891d/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 949.220420] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 949.220420] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c07db5-6a1c-1fa5-de3b-1fbd07820dd7" [ 949.220420] env[63538]: _type = "HttpNfcLease" [ 949.220420] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 949.220964] env[63538]: DEBUG oslo_vmware.rw_handles [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 949.220964] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c07db5-6a1c-1fa5-de3b-1fbd07820dd7" [ 949.220964] env[63538]: _type = "HttpNfcLease" [ 949.220964] env[63538]: }. 
{{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 949.224107] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6b62bc-fc97-4d11-90b3-5af51404f3eb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.237451] env[63538]: DEBUG oslo_vmware.rw_handles [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293335c-db27-60ad-45f1-802f8946fd2e/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 949.237797] env[63538]: DEBUG oslo_vmware.rw_handles [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293335c-db27-60ad-45f1-802f8946fd2e/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 949.305266] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 949.306669] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b517d17e-8ede-472e-8780-858ea73dd49a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.309863] env[63538]: DEBUG nova.compute.manager [req-039b75d4-2509-4e40-9093-69f867c945c2 req-b8ec715f-3414-4f15-9189-3b3cad032476 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Received event network-changed-3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 949.310069] env[63538]: DEBUG nova.compute.manager [req-039b75d4-2509-4e40-9093-69f867c945c2 req-b8ec715f-3414-4f15-9189-3b3cad032476 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Refreshing instance network info cache due to event network-changed-3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 949.310310] env[63538]: DEBUG oslo_concurrency.lockutils [req-039b75d4-2509-4e40-9093-69f867c945c2 req-b8ec715f-3414-4f15-9189-3b3cad032476 service nova] Acquiring lock "refresh_cache-8ed0bd15-71fc-435e-9e4a-90b023ad8a79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.310461] env[63538]: DEBUG oslo_concurrency.lockutils [req-039b75d4-2509-4e40-9093-69f867c945c2 req-b8ec715f-3414-4f15-9189-3b3cad032476 service nova] Acquired lock "refresh_cache-8ed0bd15-71fc-435e-9e4a-90b023ad8a79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.310625] env[63538]: DEBUG nova.network.neutron [req-039b75d4-2509-4e40-9093-69f867c945c2 req-b8ec715f-3414-4f15-9189-3b3cad032476 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Refreshing network info cache for port 3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 949.318950] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-094aa11c-244c-4eb6-86a6-53fe9e04828b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.322479] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 949.322479] env[63538]: value = "task-5101297" [ 949.322479] env[63538]: _type = "Task" [ 949.322479] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.337677] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101297, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.370553] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4a425a75-6ada-450b-a10d-dc044462b7de {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.393521] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52946724-41ee-2669-f4a2-29b1c2e78b24, 'name': SearchDatastore_Task, 'duration_secs': 0.038259} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.396735] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.397192] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 8097cb1c-bbba-45a8-be81-64d38decb1df/8097cb1c-bbba-45a8-be81-64d38decb1df.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 949.398405] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47ec1026-ec10-42a5-8b93-b3a071d2d276 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.409309] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 949.409309] env[63538]: value = "task-5101298" [ 949.409309] env[63538]: _type = "Task" [ 949.409309] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.430453] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101298, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.511770] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d8a6e8ab-5129-47e8-9b72-b307545e3e80 tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "a2e036ae-318b-44ea-9db0-10fa3838728b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.630s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.515993] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f420a587-069c-4c14-8c3f-42aae9596b7a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "d967631f-5c8a-42d8-ac05-4cec3bdb55cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.375s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.559913] env[63538]: DEBUG nova.compute.utils [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 949.851416] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101297, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.936358] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101298, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.065056] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.012s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.112413] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35172ef-64eb-4419-8e9d-8edefdcc14c6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.133342] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5283dd-12b2-4c84-a6d4-9b7d88dca831 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.194288] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14fbe35-fd9d-4153-987a-b6baf2c52e79 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.207718] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5187395-48be-45b6-83c4-c1c168c47329 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.229514] env[63538]: DEBUG nova.compute.provider_tree [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.340916] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101297, 'name': CreateSnapshot_Task, 'duration_secs': 0.708212} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.341442] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 950.343050] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89bcb545-7da2-4fb5-89da-158ff2aaadd4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.422419] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101298, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.601152} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.422889] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 8097cb1c-bbba-45a8-be81-64d38decb1df/8097cb1c-bbba-45a8-be81-64d38decb1df.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 950.423360] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.423678] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7fe6e9ed-fe45-43e5-9a12-a42045d3aa45 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.436110] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 950.436110] env[63538]: value = "task-5101299" [ 950.436110] env[63538]: _type = "Task" [ 950.436110] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.450395] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101299, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.688625] env[63538]: DEBUG nova.network.neutron [req-039b75d4-2509-4e40-9093-69f867c945c2 req-b8ec715f-3414-4f15-9189-3b3cad032476 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Updated VIF entry in instance network info cache for port 3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 950.688625] env[63538]: DEBUG nova.network.neutron [req-039b75d4-2509-4e40-9093-69f867c945c2 req-b8ec715f-3414-4f15-9189-3b3cad032476 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Updating instance_info_cache with network_info: [{"id": "3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46", "address": "fa:16:3e:63:ce:d8", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c0dc74b-3e", "ovs_interfaceid": "3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.735264] env[63538]: DEBUG nova.scheduler.client.report [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 950.867650] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 950.868379] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cb9faf9c-f4bb-4898-85b8-3509a16d7f2e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.878700] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 950.878700] env[63538]: value = "task-5101300" [ 950.878700] env[63538]: _type = "Task" [ 950.878700] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.889610] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101300, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.952023] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101299, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.150891} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.952023] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.952023] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a79a421-571a-473e-9d13-09f1c21720e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.984626] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 8097cb1c-bbba-45a8-be81-64d38decb1df/8097cb1c-bbba-45a8-be81-64d38decb1df.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.985658] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4fa10915-5c95-4129-8e99-36d87fe75d53 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.012384] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 951.012384] env[63538]: value = "task-5101301" [ 951.012384] env[63538]: _type = "Task" [ 951.012384] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.025827] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101301, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.154912] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.155655] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.156221] env[63538]: INFO nova.compute.manager [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Attaching volume 29ea8eee-32fd-404e-9a51-9211e8b1a496 to /dev/sdb [ 951.189672] env[63538]: DEBUG oslo_concurrency.lockutils [req-039b75d4-2509-4e40-9093-69f867c945c2 req-b8ec715f-3414-4f15-9189-3b3cad032476 service nova] Releasing lock "refresh_cache-8ed0bd15-71fc-435e-9e4a-90b023ad8a79" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.208851] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3befbd-d86b-463d-acf7-64ff4c04cd3f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.218247] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a504bdf2-965d-49e5-9872-de41589050d3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.235363] env[63538]: DEBUG nova.virt.block_device [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Updating existing volume attachment record: 5d52825c-e0f6-4b89-af37-132cab3e8319 {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 951.242297] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.287s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.245451] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.196s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.247609] env[63538]: DEBUG nova.objects.instance [None 
req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lazy-loading 'resources' on Instance uuid c8a02fa6-5232-4dde-b6dd-0da1089b6bbf {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 951.280676] env[63538]: INFO nova.scheduler.client.report [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Deleted allocations for instance f1838794-710c-4bea-9e73-f6912e1b69f5 [ 951.391286] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101300, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.526548] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101301, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.789660] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8b94512d-70f6-4716-b3d6-d1f6ce2887f6 tempest-AttachInterfacesV270Test-102600570 tempest-AttachInterfacesV270Test-102600570-project-member] Lock "f1838794-710c-4bea-9e73-f6912e1b69f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.029s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.826109] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.827715] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.827715] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.827715] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.827715] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.832901] env[63538]: INFO nova.compute.manager [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Terminating instance [ 951.836814] env[63538]: DEBUG nova.compute.manager [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 951.837034] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 951.837956] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65336aaa-a263-4f9e-b558-c1399968e9f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.846968] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 951.850259] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6e5baff-5345-4ed3-8244-770cbf9651ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.858203] env[63538]: DEBUG oslo_vmware.api [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 951.858203] env[63538]: value = "task-5101304" [ 951.858203] env[63538]: _type = "Task" [ 951.858203] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.892309] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101300, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.034604] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101301, 'name': ReconfigVM_Task, 'duration_secs': 0.698601} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.035636] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 8097cb1c-bbba-45a8-be81-64d38decb1df/8097cb1c-bbba-45a8-be81-64d38decb1df.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 952.035798] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38638ca2-0adb-4865-80a0-a6928203eac9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.045654] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 952.045654] env[63538]: value = "task-5101306" [ 952.045654] env[63538]: _type = "Task" [ 952.045654] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.056817] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101306, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.240348] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44dc2628-92d3-4fb7-8752-5b101d3cbd9e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.250868] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebcd28f-b610-40c4-9e08-0a8a6b4760e7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.295458] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae30af6f-7fd4-45b1-b2a4-b964b215eabc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.306043] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba3868e-44ed-4a6d-98c0-7c92b68bfb89 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.328116] env[63538]: DEBUG nova.compute.provider_tree [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.371975] env[63538]: DEBUG oslo_vmware.api [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101304, 'name': PowerOffVM_Task, 'duration_secs': 0.307798} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.372560] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 952.373076] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 952.373295] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-288cc01d-722b-4c8a-b99b-fd53a13d4965 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.393949] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101300, 'name': CloneVM_Task, 'duration_secs': 1.440552} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.393949] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Created linked-clone VM from snapshot [ 952.394969] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0015104-a0d5-4065-b504-db16da6794d3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.407473] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Uploading image 6185ea79-9c71-4180-85df-f64f05052bed {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 952.453261] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 952.453261] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 952.453585] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Deleting the datastore file [datastore2] 2e1b0bc7-3909-48e2-b9be-26822a57ee67 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
952.453821] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d24c3131-150a-4877-8beb-68f0f4fd9b55 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.462720] env[63538]: DEBUG oslo_vmware.api [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for the task: (returnval){ [ 952.462720] env[63538]: value = "task-5101308" [ 952.462720] env[63538]: _type = "Task" [ 952.462720] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.475798] env[63538]: DEBUG oslo_vmware.api [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101308, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.556862] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101306, 'name': Rename_Task, 'duration_secs': 0.226019} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.557196] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 952.557533] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ed60346-3b80-4e82-938e-3f09a35644f4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.566040] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 952.566040] env[63538]: value = "task-5101309" [ 952.566040] env[63538]: _type = "Task" [ 952.566040] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.576179] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101309, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.831012] env[63538]: DEBUG nova.scheduler.client.report [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 952.985959] env[63538]: DEBUG oslo_vmware.api [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Task: {'id': task-5101308, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173176} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.986369] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 952.987121] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 952.987246] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 952.987540] env[63538]: INFO nova.compute.manager [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Took 1.15 seconds to destroy the instance on the hypervisor. [ 952.988172] env[63538]: DEBUG oslo.service.loopingcall [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 952.988262] env[63538]: DEBUG nova.compute.manager [-] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 952.988350] env[63538]: DEBUG nova.network.neutron [-] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 953.081890] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101309, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.341561] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.096s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.344645] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.181s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.344971] env[63538]: DEBUG nova.objects.instance [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lazy-loading 'resources' on Instance uuid b47925eb-3d97-415b-9410-2e325da5ce79 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.393583] env[63538]: INFO nova.scheduler.client.report [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Deleted allocations for instance c8a02fa6-5232-4dde-b6dd-0da1089b6bbf [ 953.583657] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101309, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.908869] env[63538]: DEBUG oslo_concurrency.lockutils [None req-13780f45-b688-4499-a400-af0d3a5a9806 tempest-ServersAdminTestJSON-5453135 tempest-ServersAdminTestJSON-5453135-project-member] Lock "c8a02fa6-5232-4dde-b6dd-0da1089b6bbf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.273s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.003571] env[63538]: DEBUG nova.network.neutron [-] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.085974] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101309, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.271645] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec697118-ef3f-4625-899f-8db51a40774c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.281775] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f915b2dc-cece-4ca5-a5d4-fe37d5e295f3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.321055] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a716d23-e1ba-4522-9acb-313a359e1a73 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.332671] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76c70d4-b550-4a24-a33b-3a8324856c99 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.355520] env[63538]: DEBUG nova.compute.provider_tree [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 954.507200] env[63538]: INFO nova.compute.manager [-] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Took 1.52 seconds to deallocate network for instance. [ 954.582454] env[63538]: DEBUG oslo_vmware.api [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101309, 'name': PowerOnVM_Task, 'duration_secs': 1.596508} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.582780] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 954.583496] env[63538]: INFO nova.compute.manager [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Took 9.87 seconds to spawn the instance on the hypervisor. [ 954.583496] env[63538]: DEBUG nova.compute.manager [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 954.584789] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31c0fea-25de-47e7-9d2b-237ed0e1d757 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.889768] env[63538]: DEBUG nova.scheduler.client.report [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 109 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 954.890049] env[63538]: DEBUG nova.compute.provider_tree [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 109 to 110 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 954.890240] env[63538]: DEBUG nova.compute.provider_tree [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 955.014837] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.105251] env[63538]: INFO nova.compute.manager [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Took 45.00 seconds to build instance. [ 955.395953] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.052s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.399100] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.735s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.404444] env[63538]: INFO nova.compute.claims [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.432712] env[63538]: INFO nova.scheduler.client.report [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Deleted allocations for instance b47925eb-3d97-415b-9410-2e325da5ce79 [ 955.607498] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ad0726d-c130-445a-98d1-3b88a364bb95 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "8097cb1c-bbba-45a8-be81-64d38decb1df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.170s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.943080] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b736d27d-f2e0-4cc2-965c-723e6d88f421 tempest-ServersTestMultiNic-1282873565 tempest-ServersTestMultiNic-1282873565-project-member] Lock "b47925eb-3d97-415b-9410-2e325da5ce79" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.591s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.302804] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Volume attach. 
Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 956.303392] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992456', 'volume_id': '29ea8eee-32fd-404e-9a51-9211e8b1a496', 'name': 'volume-29ea8eee-32fd-404e-9a51-9211e8b1a496', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'edcc5700-7b1e-494a-82d1-844373a9d5a6', 'attached_at': '', 'detached_at': '', 'volume_id': '29ea8eee-32fd-404e-9a51-9211e8b1a496', 'serial': '29ea8eee-32fd-404e-9a51-9211e8b1a496'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 956.304554] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ae0c26-dfba-41d9-8e0b-8697bf014c2e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.330217] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfa2eb9-4204-4a84-8361-cbd7fd1d3aeb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.364911] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] volume-29ea8eee-32fd-404e-9a51-9211e8b1a496/volume-29ea8eee-32fd-404e-9a51-9211e8b1a496.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 956.365345] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16bf13b4-895a-452b-9fa9-0c84c3c39676 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.388191] env[63538]: DEBUG oslo_vmware.api [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 956.388191] env[63538]: value = "task-5101311" [ 956.388191] env[63538]: _type = "Task" [ 956.388191] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.401362] env[63538]: DEBUG oslo_vmware.api [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101311, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.716699] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8225b90a-4712-45e9-954a-654261ba1fb6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.725330] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2063fe48-a234-400b-a3b0-560832a3dd3a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.759820] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336dacac-73b7-44c0-8303-14b2fed87162 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.768852] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b42645-877b-48e6-85ea-186bb20dde09 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.784429] env[63538]: DEBUG nova.compute.provider_tree [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.899849] env[63538]: DEBUG oslo_vmware.api [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101311, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.287982] env[63538]: DEBUG nova.scheduler.client.report [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 957.400099] env[63538]: DEBUG oslo_vmware.api [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101311, 'name': ReconfigVM_Task, 'duration_secs': 0.731034} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.400582] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Reconfigured VM instance instance-00000048 to attach disk [datastore2] volume-29ea8eee-32fd-404e-9a51-9211e8b1a496/volume-29ea8eee-32fd-404e-9a51-9211e8b1a496.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 957.405407] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-861a1e92-2295-44b4-8a4c-b3d829eee07d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.421633] env[63538]: DEBUG oslo_vmware.api [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 957.421633] env[63538]: value = "task-5101312" [ 957.421633] env[63538]: _type = "Task" [ 957.421633] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.431168] env[63538]: DEBUG oslo_vmware.api [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101312, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.774923] env[63538]: DEBUG oslo_vmware.rw_handles [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 957.774923] env[63538]: value = "vm-992454" [ 957.774923] env[63538]: _type = "VirtualMachine" [ 957.774923] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 957.777098] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-423ad50d-d8a2-430f-b2cf-a46c174fb786 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.785325] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521707f7-33ee-5ba6-1604-520a3825891d/disk-0.vmdk. 
{{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 957.786834] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3074aeea-a8c5-49bb-84b4-e0de9cd20b87 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.790417] env[63538]: DEBUG oslo_vmware.rw_handles [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lease: (returnval){ [ 957.790417] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52552342-b7f8-572f-e35b-544ff02361fc" [ 957.790417] env[63538]: _type = "HttpNfcLease" [ 957.790417] env[63538]: } obtained for exporting VM: (result){ [ 957.790417] env[63538]: value = "vm-992454" [ 957.790417] env[63538]: _type = "VirtualMachine" [ 957.790417] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 957.790756] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the lease: (returnval){ [ 957.790756] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52552342-b7f8-572f-e35b-544ff02361fc" [ 957.790756] env[63538]: _type = "HttpNfcLease" [ 957.790756] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 957.796496] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.796991] env[63538]: DEBUG nova.compute.manager [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 957.799552] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521707f7-33ee-5ba6-1604-520a3825891d/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 957.799715] env[63538]: ERROR oslo_vmware.rw_handles [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521707f7-33ee-5ba6-1604-520a3825891d/disk-0.vmdk due to incomplete transfer. 
[ 957.800206] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.543s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.801669] env[63538]: INFO nova.compute.claims [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 957.804357] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1cc2243a-3a1c-4f72-bc63-fe3dd4f018cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.807932] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 957.807932] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52552342-b7f8-572f-e35b-544ff02361fc" [ 957.807932] env[63538]: _type = "HttpNfcLease" [ 957.807932] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 957.808580] env[63538]: DEBUG oslo_vmware.rw_handles [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 957.808580] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52552342-b7f8-572f-e35b-544ff02361fc" [ 957.808580] env[63538]: _type = "HttpNfcLease" [ 957.808580] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 957.809403] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb72ed4-be6d-49b8-908c-1150fc9cc84e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.814375] env[63538]: DEBUG oslo_vmware.rw_handles [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521707f7-33ee-5ba6-1604-520a3825891d/disk-0.vmdk. 
{{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 957.814598] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Uploaded image b299d3c5-b48d-4f1f-a911-692e6a986d0c to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 957.818876] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 957.825326] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a0ce40de-ce91-4a07-ad34-e93934d0a6ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.826804] env[63538]: DEBUG oslo_vmware.rw_handles [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b68626-60fe-67bd-25da-5d3a0f30f0a1/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 957.827018] env[63538]: DEBUG oslo_vmware.rw_handles [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b68626-60fe-67bd-25da-5d3a0f30f0a1/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 957.888748] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 957.888748] env[63538]: value = "task-5101314" [ 957.888748] env[63538]: _type = "Task" [ 957.888748] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.900089] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101314, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.933771] env[63538]: DEBUG oslo_vmware.api [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101312, 'name': ReconfigVM_Task, 'duration_secs': 0.181154} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.934120] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992456', 'volume_id': '29ea8eee-32fd-404e-9a51-9211e8b1a496', 'name': 'volume-29ea8eee-32fd-404e-9a51-9211e8b1a496', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'edcc5700-7b1e-494a-82d1-844373a9d5a6', 'attached_at': '', 'detached_at': '', 'volume_id': '29ea8eee-32fd-404e-9a51-9211e8b1a496', 'serial': '29ea8eee-32fd-404e-9a51-9211e8b1a496'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 957.955269] env[63538]: DEBUG oslo_vmware.rw_handles [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293335c-db27-60ad-45f1-802f8946fd2e/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 957.956790] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62791cc1-e06f-445a-af65-e58cf7248821 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.964248] env[63538]: DEBUG oslo_vmware.rw_handles [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293335c-db27-60ad-45f1-802f8946fd2e/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 957.964248] env[63538]: ERROR oslo_vmware.rw_handles [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293335c-db27-60ad-45f1-802f8946fd2e/disk-0.vmdk due to incomplete transfer. [ 957.964248] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b2bcd0c8-2245-4b30-b931-d1bacd2bf7de {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.974994] env[63538]: DEBUG oslo_vmware.rw_handles [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293335c-db27-60ad-45f1-802f8946fd2e/disk-0.vmdk. 
{{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 957.974994] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Uploaded image b7292989-b20d-44bb-b54a-80b3b3bed442 to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 957.976125] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 957.977018] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2608525f-7082-4b5c-9050-0076aec40301 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.984710] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 957.984710] env[63538]: value = "task-5101315" [ 957.984710] env[63538]: _type = "Task" [ 957.984710] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.991917] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-04b125e5-2d9f-421b-a1cd-7ccc9199d91f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.000245] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101315, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.010561] env[63538]: DEBUG nova.compute.manager [req-7c7ab66d-02c7-4d23-aa4e-a519934fee48 req-4447c514-0dec-4675-bd5d-ff7a39233ef8 service nova] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Received event network-vif-deleted-47d19b83-6292-46e2-835f-1198ef52374c {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 958.302347] env[63538]: DEBUG nova.compute.utils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 958.303944] env[63538]: DEBUG nova.compute.manager [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 958.307491] env[63538]: DEBUG nova.network.neutron [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 958.386334] env[63538]: DEBUG nova.policy [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '884364daa8f746fb9b32a8b33c9e2cfd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea05f3fb4676466bb2a286f5a2fefb8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 958.402634] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101314, 'name': Destroy_Task, 'duration_secs': 0.561667} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.403721] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Destroyed the VM [ 958.404408] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 958.404904] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cb2ff419-836b-4a60-a710-aa831a22eaec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.414378] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 958.414378] env[63538]: value = "task-5101316" [ 958.414378] env[63538]: _type = "Task" [ 958.414378] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.425726] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101316, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.504792] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101315, 'name': Destroy_Task} progress is 33%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.793126] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.793370] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.808610] env[63538]: DEBUG nova.compute.manager [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 958.931712] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101316, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.998434] env[63538]: DEBUG nova.objects.instance [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lazy-loading 'flavor' on Instance uuid edcc5700-7b1e-494a-82d1-844373a9d5a6 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.010142] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101315, 'name': Destroy_Task, 'duration_secs': 0.704532} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.010142] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Destroyed the VM [ 959.010142] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 959.010142] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-65e976c2-8f8d-4e80-952c-0802485d5b2b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.021904] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 959.021904] env[63538]: value = "task-5101317" [ 959.021904] env[63538]: _type = "Task" [ 959.021904] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.038065] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101317, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.279412] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4990561-3c56-4794-a503-2beb4774713f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.291228] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f36412-421b-4e38-a38e-b435aac7be95 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.296798] env[63538]: DEBUG nova.compute.manager [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 959.339728] env[63538]: DEBUG nova.network.neutron [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Successfully created port: d1b61459-5c9a-429e-aa08-d0d2d0c5c846 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 959.348630] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20af62e0-a3d9-4c35-8554-2f31d68901d6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.359414] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03464671-acd2-47c4-8a93-00a84ca5296a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.378934] env[63538]: DEBUG nova.compute.provider_tree [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.428064] env[63538]: DEBUG oslo_vmware.api [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101316, 'name': RemoveSnapshot_Task, 'duration_secs': 0.736953} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.428376] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 959.428528] env[63538]: INFO nova.compute.manager [None req-8ea9a3d1-68d0-44a6-811b-32483fa0e069 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Took 15.02 seconds to snapshot the instance on the hypervisor. [ 959.507870] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b0ecdd4d-280e-4882-8e57-5da7f8325e52 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.352s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.537998] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101317, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.824398] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.848188] env[63538]: DEBUG nova.compute.manager [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 959.885775] env[63538]: DEBUG nova.scheduler.client.report [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 959.894414] env[63538]: DEBUG nova.virt.hardware [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 959.896571] env[63538]: DEBUG nova.virt.hardware [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 959.896571] env[63538]: DEBUG nova.virt.hardware [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 959.896571] env[63538]: DEBUG nova.virt.hardware [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 959.896571] env[63538]: DEBUG nova.virt.hardware [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 959.896571] env[63538]: DEBUG nova.virt.hardware [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 959.896571] env[63538]: DEBUG nova.virt.hardware [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 959.896571] env[63538]: DEBUG nova.virt.hardware [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 959.896571] env[63538]: DEBUG nova.virt.hardware [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 959.896571] env[63538]: DEBUG nova.virt.hardware [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 959.896986] env[63538]: DEBUG nova.virt.hardware [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 959.899900] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959f021f-4453-45dc-87ec-a89c4b6d6171 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.909221] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e11c64-2dc7-4288-970f-26062cad57ac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.035572] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101317, 'name': RemoveSnapshot_Task, 'duration_secs': 0.731667} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.036061] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 960.036482] env[63538]: DEBUG nova.compute.manager [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 960.037453] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a83c2d-69e4-46e4-a31b-f2f41cffe90b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.215116] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "0339c969-ad97-47b1-8fab-ee595738d9df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.215116] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "0339c969-ad97-47b1-8fab-ee595738d9df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.247310] env[63538]: INFO nova.compute.manager [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Rescuing [ 960.247310] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.247310] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.247310] env[63538]: DEBUG nova.network.neutron [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 960.271765] env[63538]: DEBUG nova.compute.manager [req-bc2c725c-16d6-4f4d-a076-cbb229c2a098 
req-3761f43a-671e-4b5c-9c01-98f8642778c7 service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Received event network-changed-da4e2c60-afe9-437a-ba0d-55b9b358ae8f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 960.272020] env[63538]: DEBUG nova.compute.manager [req-bc2c725c-16d6-4f4d-a076-cbb229c2a098 req-3761f43a-671e-4b5c-9c01-98f8642778c7 service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Refreshing instance network info cache due to event network-changed-da4e2c60-afe9-437a-ba0d-55b9b358ae8f. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 960.272193] env[63538]: DEBUG oslo_concurrency.lockutils [req-bc2c725c-16d6-4f4d-a076-cbb229c2a098 req-3761f43a-671e-4b5c-9c01-98f8642778c7 service nova] Acquiring lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.272339] env[63538]: DEBUG oslo_concurrency.lockutils [req-bc2c725c-16d6-4f4d-a076-cbb229c2a098 req-3761f43a-671e-4b5c-9c01-98f8642778c7 service nova] Acquired lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.272504] env[63538]: DEBUG nova.network.neutron [req-bc2c725c-16d6-4f4d-a076-cbb229c2a098 req-3761f43a-671e-4b5c-9c01-98f8642778c7 service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Refreshing network info cache for port da4e2c60-afe9-437a-ba0d-55b9b358ae8f {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 960.394776] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.395743] env[63538]: DEBUG nova.compute.manager [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 960.402801] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 23.046s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.553441] env[63538]: INFO nova.compute.manager [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Shelve offloading [ 960.556499] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 960.556799] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8055146-554f-4e8c-95a1-9c9ac5846e02 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.565919] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 960.565919] env[63538]: value = "task-5101318" [ 960.565919] env[63538]: _type = "Task" [ 960.565919] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.578551] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 960.578775] env[63538]: DEBUG nova.compute.manager [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 960.580209] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7915df6-1e4d-4dd5-897e-80c10f220522 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.589051] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "refresh_cache-2e97b357-0200-4aed-9705-dd7808f853ba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.589051] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "refresh_cache-2e97b357-0200-4aed-9705-dd7808f853ba" {{(pid=63538) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.589245] env[63538]: DEBUG nova.network.neutron [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 960.717145] env[63538]: DEBUG nova.compute.manager [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 960.905607] env[63538]: DEBUG nova.compute.utils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 960.910604] env[63538]: INFO nova.compute.claims [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.923329] env[63538]: DEBUG nova.compute.manager [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 960.923622] env[63538]: DEBUG nova.network.neutron [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 961.014848] env[63538]: DEBUG nova.policy [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '861014e7810d4cf59cfa061acbb8f7eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4efc4733ea894fb7825e52b29ac8b6ba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 961.248059] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.285954] env[63538]: DEBUG nova.network.neutron [req-bc2c725c-16d6-4f4d-a076-cbb229c2a098 req-3761f43a-671e-4b5c-9c01-98f8642778c7 service nova] [instance: 
8097cb1c-bbba-45a8-be81-64d38decb1df] Updated VIF entry in instance network info cache for port da4e2c60-afe9-437a-ba0d-55b9b358ae8f. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 961.286825] env[63538]: DEBUG nova.network.neutron [req-bc2c725c-16d6-4f4d-a076-cbb229c2a098 req-3761f43a-671e-4b5c-9c01-98f8642778c7 service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Updating instance_info_cache with network_info: [{"id": "da4e2c60-afe9-437a-ba0d-55b9b358ae8f", "address": "fa:16:3e:8a:cb:bc", "network": {"id": "8497b0d5-d275-4479-ae83-3ef4a1bb795b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-849919494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f6e933bf6c4e71af3b2a1e02d6e42f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4e2c60-af", "ovs_interfaceid": "da4e2c60-afe9-437a-ba0d-55b9b358ae8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.318752] env[63538]: DEBUG nova.network.neutron [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Updating instance_info_cache with network_info: [{"id": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "address": "fa:16:3e:2d:31:f0", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff0fa7b-e0", "ovs_interfaceid": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.422061] env[63538]: DEBUG nova.compute.manager [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 
tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 961.427715] env[63538]: INFO nova.compute.resource_tracker [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating resource usage from migration 798396a5-59d2-4cd0-956e-72af70feb5c4 [ 961.489249] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "8097cb1c-bbba-45a8-be81-64d38decb1df" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.489688] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "8097cb1c-bbba-45a8-be81-64d38decb1df" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.489688] env[63538]: INFO nova.compute.manager [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Rebooting instance [ 961.633748] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "fb26fb32-a420-4667-850c-e32786edd8f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.634401] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "fb26fb32-a420-4667-850c-e32786edd8f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.779823] env[63538]: DEBUG nova.network.neutron [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Updating instance_info_cache with network_info: [{"id": "5a3ccff0-6550-429c-a4ce-0afa4c25230f", "address": "fa:16:3e:eb:33:f1", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a3ccff0-65", "ovs_interfaceid": "5a3ccff0-6550-429c-a4ce-0afa4c25230f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.793030] env[63538]: DEBUG oslo_concurrency.lockutils [req-bc2c725c-16d6-4f4d-a076-cbb229c2a098 req-3761f43a-671e-4b5c-9c01-98f8642778c7 service nova] Releasing lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.824178] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.869445] env[63538]: DEBUG nova.network.neutron [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Successfully created port: 67d24d11-311f-4e9b-90b8-20569417ffb1 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 961.879981] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fabf3bf4-b3e5-4616-ad18-ac99c9e8d8a3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.890206] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e36514-0a78-4ee1-9ad4-51d3a5896b50 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.894509] env[63538]: DEBUG nova.network.neutron [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Successfully updated port: d1b61459-5c9a-429e-aa08-d0d2d0c5c846 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 961.928060] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb868271-b64c-4da9-b352-a46f4a1517cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.941975] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3348ae7d-1165-46c8-9d10-d675164d0edb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.957537] env[63538]: DEBUG nova.compute.provider_tree [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed in ProviderTree 
for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.007871] env[63538]: DEBUG nova.compute.manager [req-8405389e-508a-4563-b9a4-ec3dd5a7e804 req-6dd123a5-eb92-4a84-a95e-c2bfdc7c4c97 service nova] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Received event network-vif-plugged-d1b61459-5c9a-429e-aa08-d0d2d0c5c846 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 962.008010] env[63538]: DEBUG oslo_concurrency.lockutils [req-8405389e-508a-4563-b9a4-ec3dd5a7e804 req-6dd123a5-eb92-4a84-a95e-c2bfdc7c4c97 service nova] Acquiring lock "cf72ac3d-4051-428a-b5bc-7f28accb13c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.008278] env[63538]: DEBUG oslo_concurrency.lockutils [req-8405389e-508a-4563-b9a4-ec3dd5a7e804 req-6dd123a5-eb92-4a84-a95e-c2bfdc7c4c97 service nova] Lock "cf72ac3d-4051-428a-b5bc-7f28accb13c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.008464] env[63538]: DEBUG oslo_concurrency.lockutils [req-8405389e-508a-4563-b9a4-ec3dd5a7e804 req-6dd123a5-eb92-4a84-a95e-c2bfdc7c4c97 service nova] Lock "cf72ac3d-4051-428a-b5bc-7f28accb13c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.008693] env[63538]: DEBUG nova.compute.manager [req-8405389e-508a-4563-b9a4-ec3dd5a7e804 req-6dd123a5-eb92-4a84-a95e-c2bfdc7c4c97 service nova] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] No waiting events found dispatching network-vif-plugged-d1b61459-5c9a-429e-aa08-d0d2d0c5c846 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 962.008816] env[63538]: WARNING nova.compute.manager [req-8405389e-508a-4563-b9a4-ec3dd5a7e804 req-6dd123a5-eb92-4a84-a95e-c2bfdc7c4c97 service nova] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Received unexpected event network-vif-plugged-d1b61459-5c9a-429e-aa08-d0d2d0c5c846 for instance with vm_state building and task_state spawning. 
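The recurring "Acquiring lock ... by ...", "Lock ... acquired ... :: waited Ns" and "released ... :: held Ns" entries above (lockutils.py:310/313/331 and 402/407/421) are emitted by oslo.concurrency's locking helpers, used here around the resource tracker, the per-instance build lock, and the "refresh_cache-<uuid>" network-info cache. A minimal sketch of that pattern follows; the helper names (claim_resources, refresh_network_cache) are hypothetical, only the lockutils calls are real API, and this is an illustration of the pattern rather than Nova's actual code.

    # Sketch only: shows the two oslo.concurrency idioms behind the log lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Decorator form: produces the "acquired ... waited Ns" / "released ... held Ns"
        # DEBUG lines (lockutils inner wrapper) seen for the resource tracker.
        pass

    def refresh_network_cache(instance_uuid):
        # Context-manager form: produces the "Acquiring lock" / "Releasing lock"
        # lines seen for the per-instance "refresh_cache-<uuid>" locks.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache while holding the lock

Both forms serialize access to the named critical section within the process (or across processes if external locking is requested), which is why the log interleaves long "waited" times, such as the 23.046s wait on "compute_resources" above, with short "held" times.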
[ 962.010318] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.010486] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquired lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.010657] env[63538]: DEBUG nova.network.neutron [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 962.137336] env[63538]: DEBUG nova.compute.manager [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 962.282897] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "refresh_cache-2e97b357-0200-4aed-9705-dd7808f853ba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.367581] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 962.368903] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c64697e-cd06-4640-b291-adc8e4f9a81c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.377064] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 962.377064] env[63538]: value = "task-5101319" [ 962.377064] env[63538]: _type = "Task" [ 962.377064] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.392638] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101319, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.399328] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "refresh_cache-cf72ac3d-4051-428a-b5bc-7f28accb13c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.399328] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "refresh_cache-cf72ac3d-4051-428a-b5bc-7f28accb13c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.399328] env[63538]: DEBUG nova.network.neutron [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 962.437502] env[63538]: DEBUG nova.compute.manager [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 962.463387] env[63538]: DEBUG nova.scheduler.client.report [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 962.478568] env[63538]: DEBUG nova.virt.hardware [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 962.478822] env[63538]: DEBUG nova.virt.hardware [None req-b4701874-5008-461f-9344-3800bf3b9ef1 
tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 962.479105] env[63538]: DEBUG nova.virt.hardware [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 962.480144] env[63538]: DEBUG nova.virt.hardware [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 962.480144] env[63538]: DEBUG nova.virt.hardware [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 962.480259] env[63538]: DEBUG nova.virt.hardware [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 962.480659] env[63538]: DEBUG nova.virt.hardware [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 962.480901] env[63538]: DEBUG nova.virt.hardware [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 962.481173] env[63538]: DEBUG nova.virt.hardware [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 962.481472] env[63538]: DEBUG nova.virt.hardware [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 962.481693] env[63538]: DEBUG nova.virt.hardware [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 962.484345] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9aeb3c93-69df-42cf-a476-9b3944014f27 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.496414] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48bb1503-43c8-4c3a-ad97-dfc9bf53403b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.661927] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.805834] env[63538]: DEBUG nova.network.neutron [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Updating instance_info_cache with network_info: [{"id": "da4e2c60-afe9-437a-ba0d-55b9b358ae8f", "address": "fa:16:3e:8a:cb:bc", "network": {"id": "8497b0d5-d275-4479-ae83-3ef4a1bb795b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-849919494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f6e933bf6c4e71af3b2a1e02d6e42f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4e2c60-af", "ovs_interfaceid": "da4e2c60-afe9-437a-ba0d-55b9b358ae8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.894907] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101319, 'name': PowerOffVM_Task, 'duration_secs': 0.441755} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.895327] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 962.896264] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c01b22-b9e2-466a-bdf5-c9c43b9234f6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.920475] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88935c27-b7d9-4e6d-9aaa-be4dd3a0fa32 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.949859] env[63538]: DEBUG nova.network.neutron [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 962.955708] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 962.956040] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3e85f05-a3e8-4c98-bd34-ce4765649805 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.964625] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 962.964625] env[63538]: value = "task-5101320" [ 962.964625] env[63538]: _type = "Task" [ 962.964625] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.973808] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.575s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.974035] env[63538]: INFO nova.compute.manager [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Migrating [ 962.987918] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.904s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.988200] env[63538]: DEBUG nova.objects.instance [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Lazy-loading 'resources' on Instance uuid e447c109-4cef-4cc7-9acf-61abc0f47482 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 962.997249] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 962.997477] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 962.997735] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.997906] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.998079] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 962.998617] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79e244ea-8c17-4c6f-aa18-6aef18c5ac29 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.011552] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 963.012661] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 963.013754] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f057d6ff-24fb-4a01-b00a-c97f6d35516a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.022113] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 963.022113] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a31f31-4299-f9a3-975f-6433a79b710d" [ 963.022113] env[63538]: _type = "Task" [ 963.022113] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.039521] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a31f31-4299-f9a3-975f-6433a79b710d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.145151] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 963.146236] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11041197-805f-430a-b5d2-63edaf774c8d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.162740] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 963.163237] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fabd99c9-7f06-4317-84ec-9082122559ed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.195592] env[63538]: DEBUG nova.network.neutron [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Updating instance_info_cache with network_info: [{"id": "d1b61459-5c9a-429e-aa08-d0d2d0c5c846", "address": "fa:16:3e:70:39:28", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1b61459-5c", "ovs_interfaceid": "d1b61459-5c9a-429e-aa08-d0d2d0c5c846", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.255118] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 963.255634] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Deleting contents of the VM from datastore datastore1 {{(pid=63538) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 963.255899] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleting the datastore file [datastore1] 2e97b357-0200-4aed-9705-dd7808f853ba {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.255950] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-842bb8a7-bd17-4f1c-bf25-f2c59becaf06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.266882] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 963.266882] env[63538]: value = "task-5101322" [ 963.266882] env[63538]: _type = "Task" [ 963.266882] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.276774] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101322, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.309893] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Releasing lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.312697] env[63538]: DEBUG nova.compute.manager [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 963.313820] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4881e8b-66ad-4ebd-926e-3a7ed6fa6072 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.500253] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.504249] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.504249] env[63538]: DEBUG nova.network.neutron [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Building 
network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 963.534187] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a31f31-4299-f9a3-975f-6433a79b710d, 'name': SearchDatastore_Task, 'duration_secs': 0.019941} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.535072] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfc6a68a-ffcb-4b85-96b3-a7f56c8ae1e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.541095] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 963.541095] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520d036a-02b3-a1c5-3b0f-ee6580c722ef" [ 963.541095] env[63538]: _type = "Task" [ 963.541095] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.550642] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520d036a-02b3-a1c5-3b0f-ee6580c722ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.747752] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "refresh_cache-cf72ac3d-4051-428a-b5bc-7f28accb13c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.747752] env[63538]: DEBUG nova.compute.manager [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Instance network_info: |[{"id": "d1b61459-5c9a-429e-aa08-d0d2d0c5c846", "address": "fa:16:3e:70:39:28", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1b61459-5c", "ovs_interfaceid": "d1b61459-5c9a-429e-aa08-d0d2d0c5c846", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 963.747752] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:39:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1b61459-5c9a-429e-aa08-d0d2d0c5c846', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 963.747752] env[63538]: DEBUG oslo.service.loopingcall [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.747752] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 963.747752] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e7c2b54-34b8-4baa-bb35-4dfe1be18473 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.750148] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 963.750148] env[63538]: value = "task-5101323" [ 963.750148] env[63538]: _type = "Task" [ 963.750148] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.761618] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101323, 'name': CreateVM_Task} progress is 15%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.781251] env[63538]: DEBUG oslo_vmware.api [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101322, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256801} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.781251] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.781251] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 963.781452] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 963.815337] env[63538]: INFO nova.scheduler.client.report [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleted allocations for instance 2e97b357-0200-4aed-9705-dd7808f853ba [ 964.056752] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520d036a-02b3-a1c5-3b0f-ee6580c722ef, 'name': SearchDatastore_Task, 'duration_secs': 0.010596} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.060965] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.060965] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] edcc5700-7b1e-494a-82d1-844373a9d5a6/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk. {{(pid=63538) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 964.060965] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8262f0e-5e53-4f66-a14e-bf28f8e38292 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.066349] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 964.066349] env[63538]: value = "task-5101324" [ 964.066349] env[63538]: _type = "Task" [ 964.066349] env[63538]: } to complete. 
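The disk-copy entry above composes both datastore paths from fixed pieces: the cached image lives under devstack-image-cache_base keyed by image id, and the rescue disk is written into the instance's own folder with a "-rescue" suffix. A small sketch of that path composition, with illustrative helper names rather than the driver's own datastore path objects:

```python
# Sketch of how the cached-image source path and the per-instance rescue-disk
# destination path in the copy entry above are composed. Helper names are
# illustrative; the real driver uses its own datastore path objects.
def image_cache_vmdk(datastore: str, image_id: str,
                     cache_dir: str = "devstack-image-cache_base") -> str:
    return f"[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk"


def rescue_vmdk(datastore: str, instance_uuid: str, image_id: str) -> str:
    return f"[{datastore}] {instance_uuid}/{image_id}-rescue.vmdk"


if __name__ == "__main__":
    image = "faabbca4-e27b-433a-b93d-f059fd73bc92"
    instance = "edcc5700-7b1e-494a-82d1-844373a9d5a6"
    print(image_cache_vmdk("datastore2", image))   # source: cached image vmdk
    print(rescue_vmdk("datastore2", instance, image))  # destination: rescue vmdk
```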
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.082683] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101324, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.246131] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c146a8-cec4-4260-999c-a67aa27fc991 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.259828] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12c3acb-ccf8-49f7-bcdf-99d1d00eda5f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.268189] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101323, 'name': CreateVM_Task, 'duration_secs': 0.39469} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.268902] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 964.269673] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.270027] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.270215] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 964.270502] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ece1e7dd-a6ff-4308-8cf7-56db44b17240 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.313745] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156a093b-ed32-4e91-8e44-34d31f832f58 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.318064] env[63538]: DEBUG nova.compute.manager [req-bd2acf85-2318-4ec6-a582-bccea2eed37a req-1458e1a2-2385-4bf5-b2c3-d1a335e8a4f0 service nova] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Received event network-vif-plugged-67d24d11-311f-4e9b-90b8-20569417ffb1 {{(pid=63538) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11220}} [ 964.318269] env[63538]: DEBUG oslo_concurrency.lockutils [req-bd2acf85-2318-4ec6-a582-bccea2eed37a req-1458e1a2-2385-4bf5-b2c3-d1a335e8a4f0 service nova] Acquiring lock "6bc30d96-8056-421c-875b-c24488e5f595-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.318503] env[63538]: DEBUG oslo_concurrency.lockutils [req-bd2acf85-2318-4ec6-a582-bccea2eed37a req-1458e1a2-2385-4bf5-b2c3-d1a335e8a4f0 service nova] Lock "6bc30d96-8056-421c-875b-c24488e5f595-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.318716] env[63538]: DEBUG oslo_concurrency.lockutils [req-bd2acf85-2318-4ec6-a582-bccea2eed37a req-1458e1a2-2385-4bf5-b2c3-d1a335e8a4f0 service nova] Lock "6bc30d96-8056-421c-875b-c24488e5f595-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.318858] env[63538]: DEBUG nova.compute.manager [req-bd2acf85-2318-4ec6-a582-bccea2eed37a req-1458e1a2-2385-4bf5-b2c3-d1a335e8a4f0 service nova] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] No waiting events found dispatching network-vif-plugged-67d24d11-311f-4e9b-90b8-20569417ffb1 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 964.319024] env[63538]: WARNING nova.compute.manager [req-bd2acf85-2318-4ec6-a582-bccea2eed37a req-1458e1a2-2385-4bf5-b2c3-d1a335e8a4f0 service nova] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Received unexpected event network-vif-plugged-67d24d11-311f-4e9b-90b8-20569417ffb1 for instance with vm_state building and task_state spawning. [ 964.322013] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.322494] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 964.322494] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c49318-e9ad-51da-665a-30b13cadf776" [ 964.322494] env[63538]: _type = "Task" [ 964.322494] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.332019] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24bece98-2fdb-456c-9405-346d3f45f8ed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.345780] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c49318-e9ad-51da-665a-30b13cadf776, 'name': SearchDatastore_Task} progress is 0%. 
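The "<uuid>-events" lock and the "No waiting events found dispatching ... / Received unexpected event" pair above reflect an event latch: an external event either releases a waiter that registered for it, or is reported as unexpected. A minimal threading-based sketch of that pattern, as an illustrative stand-in rather than Nova's InstanceEvents class:

```python
# Sketch of the "pop_instance_event" pattern in the entries above: an external
# event either releases a registered waiter or is logged as unexpected.
# Illustrative stand-in, not Nova's InstanceEvents implementation.
import threading
from collections import defaultdict


class InstanceEventLatch:
    def __init__(self):
        self._lock = threading.Lock()          # plays the role of "<uuid>-events"
        self._waiters = defaultdict(dict)      # instance_uuid -> {event_name: Event}

    def prepare(self, instance_uuid: str, event_name: str) -> threading.Event:
        """Register interest in an event before starting the work that emits it."""
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def dispatch(self, instance_uuid: str, event_name: str) -> None:
        """Deliver an external event; warn if nobody was waiting for it."""
        with self._lock:
            ev = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if ev is None:
            print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
        else:
            ev.set()


# Usage: waiter = latch.prepare(uuid, "network-vif-plugged-<port>")
#        ... trigger the plug ...; waiter.wait(timeout=300)
```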
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.348803] env[63538]: DEBUG nova.compute.manager [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Received event network-changed-d1b61459-5c9a-429e-aa08-d0d2d0c5c846 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 964.348803] env[63538]: DEBUG nova.compute.manager [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Refreshing instance network info cache due to event network-changed-d1b61459-5c9a-429e-aa08-d0d2d0c5c846. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 964.348803] env[63538]: DEBUG oslo_concurrency.lockutils [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] Acquiring lock "refresh_cache-cf72ac3d-4051-428a-b5bc-7f28accb13c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.348803] env[63538]: DEBUG oslo_concurrency.lockutils [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] Acquired lock "refresh_cache-cf72ac3d-4051-428a-b5bc-7f28accb13c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.348803] env[63538]: DEBUG nova.network.neutron [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Refreshing network info cache for port d1b61459-5c9a-429e-aa08-d0d2d0c5c846 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 964.351725] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48d12cc-b482-4eba-94d8-659ea04996d3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.374431] env[63538]: DEBUG nova.compute.provider_tree [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.378043] env[63538]: DEBUG nova.network.neutron [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Successfully updated port: 67d24d11-311f-4e9b-90b8-20569417ffb1 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 964.382360] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Doing hard reboot of VM {{(pid=63538) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1064}} [ 964.383331] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-fec9d784-23cc-4d46-a25a-86ae1933dfb8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.393185] 
env[63538]: DEBUG oslo_vmware.api [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 964.393185] env[63538]: value = "task-5101325" [ 964.393185] env[63538]: _type = "Task" [ 964.393185] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.260561] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "refresh_cache-6bc30d96-8056-421c-875b-c24488e5f595" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.260945] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquired lock "refresh_cache-6bc30d96-8056-421c-875b-c24488e5f595" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.260945] env[63538]: DEBUG nova.network.neutron [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 965.263195] env[63538]: DEBUG nova.scheduler.client.report [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 965.283406] env[63538]: DEBUG oslo_vmware.api [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101325, 'name': ResetVM_Task, 'duration_secs': 0.136761} completed successfully. 
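The inventory record logged above translates into usable capacity with the usual Placement rule, assumed here to be `(total - reserved) * allocation_ratio`, while `max_unit` caps what any single allocation may request:

```python
# Capacity arithmetic for the inventory record logged above. Assumes the usual
# Placement rule: usable capacity = (total - reserved) * allocation_ratio,
# while max_unit caps what a single allocation may request.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "max_unit": 16,    "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "max_unit": 95,    "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
    print(f"{rc}: capacity={capacity}, single allocation capped at {inv['max_unit']}")

# VCPU: capacity=192, single allocation capped at 16
# MEMORY_MB: capacity=196078, single allocation capped at 65530
# DISK_GB: capacity=200, single allocation capped at 95
```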
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.291121] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Did hard reboot of VM {{(pid=63538) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1068}} [ 965.291311] env[63538]: DEBUG nova.compute.manager [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 965.292952] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c49318-e9ad-51da-665a-30b13cadf776, 'name': SearchDatastore_Task, 'duration_secs': 0.06195} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.293210] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101324, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541304} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.293941] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b1418f-b5ed-4698-9665-bd2d7a03b366 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.297070] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.297286] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 965.297530] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.297679] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.297858] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 965.298141] env[63538]: INFO nova.virt.vmwareapi.ds_util [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] edcc5700-7b1e-494a-82d1-844373a9d5a6/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk. [ 965.298672] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c7b9190-9c02-4461-a081-a075430e1a05 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.301013] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24dde571-c213-4c72-b711-b696dd9af636 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.338297] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] edcc5700-7b1e-494a-82d1-844373a9d5a6/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 965.339881] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a21ede10-b7b6-45c1-8b34-e61f4a9abc07 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.357239] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 965.357239] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Folder [datastore2] devstack-image-cache_base created. 
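The "Creating directory ... / Folder ... created" sequence above is an idempotent create-if-missing: the directory is created unconditionally and an "already exists" outcome is treated as success. A hedged sketch of that shape, with `mkdir_on_datastore` and `AlreadyExists` as illustrative stand-ins for the driver's datastore calls:

```python
# Sketch of the "create folder if missing" idempotency visible above: create
# unconditionally and treat "already exists" as success. mkdir_on_datastore
# and AlreadyExists are illustrative stand-ins, not the driver's API.
class AlreadyExists(Exception):
    pass


def ensure_folder(mkdir_on_datastore, datastore: str, path: str) -> None:
    try:
        mkdir_on_datastore(datastore, path)
        print(f"Created directory [{datastore}] {path}")
    except AlreadyExists:
        # Another request created it first; that is the desired end state.
        print(f"Folder [{datastore}] {path} already exists")
```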
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 965.357239] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6df13612-ed14-4836-86bc-a9e1965b6033 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.364487] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 965.364487] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521c86f4-6bd0-e739-5df0-8d143fa4be89" [ 965.364487] env[63538]: _type = "Task" [ 965.364487] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.367897] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 965.367897] env[63538]: value = "task-5101326" [ 965.367897] env[63538]: _type = "Task" [ 965.367897] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.381281] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521c86f4-6bd0-e739-5df0-8d143fa4be89, 'name': SearchDatastore_Task, 'duration_secs': 0.013268} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.388424] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101326, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.388750] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beb30a1c-8bf5-488e-8a74-d35d44c0de8c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.395621] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 965.395621] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52727f9a-6866-f907-3f96-c7dcc3f627ed" [ 965.395621] env[63538]: _type = "Task" [ 965.395621] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.407625] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52727f9a-6866-f907-3f96-c7dcc3f627ed, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.432661] env[63538]: DEBUG nova.network.neutron [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance_info_cache with network_info: [{"id": "31cf3b33-b97d-4183-a21c-80e24e27351f", "address": "fa:16:3e:e0:a1:3e", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31cf3b33-b9", "ovs_interfaceid": "31cf3b33-b97d-4183-a21c-80e24e27351f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.773233] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.783s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.773750] env[63538]: DEBUG oslo_concurrency.lockutils [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.722s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.774285] env[63538]: DEBUG nova.objects.instance [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lazy-loading 'resources' on Instance uuid 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.783813] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fbc810de-59fd-42b2-92f5-ca6dfc6c1b51 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "2e97b357-0200-4aed-9705-dd7808f853ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.796437] env[63538]: INFO nova.scheduler.client.report [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 
tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Deleted allocations for instance e447c109-4cef-4cc7-9acf-61abc0f47482 [ 965.818196] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dc27d139-01eb-40ff-aa0d-97c28dd1e1fa tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "8097cb1c-bbba-45a8-be81-64d38decb1df" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.328s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.858276] env[63538]: DEBUG nova.network.neutron [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 965.882939] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101326, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.907634] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52727f9a-6866-f907-3f96-c7dcc3f627ed, 'name': SearchDatastore_Task, 'duration_secs': 0.012163} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.911493] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.911973] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] cf72ac3d-4051-428a-b5bc-7f28accb13c0/cf72ac3d-4051-428a-b5bc-7f28accb13c0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 965.912751] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a47fa22-959b-40b6-a562-ecf66c0e812a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.923019] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 965.923019] env[63538]: value = "task-5101327" [ 965.923019] env[63538]: _type = "Task" [ 965.923019] env[63538]: } to complete. 
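The "acquired ... waited Ns" / "released ... held Ns" pairs in the lock entries above measure how long acquisition blocked and how long the lock was then held. A minimal sketch of that bookkeeping; illustrative only, since oslo.concurrency's lockutils does this internally:

```python
# Sketch of the waited/held timing in the lock entries above: measure how long
# acquisition blocked and how long the lock was held, and log both on release.
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_lock = threading.Lock()


@contextmanager
def timed_lock(name: str, owner: str):
    with _registry_lock:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" released by "{owner}" :: held {held:.3f}s')


# Usage: with timed_lock("compute_resources", "ResourceTracker.update_usage"): ...
```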
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.931839] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101327, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.936659] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.259476] env[63538]: DEBUG nova.network.neutron [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Updated VIF entry in instance network info cache for port d1b61459-5c9a-429e-aa08-d0d2d0c5c846. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 966.259933] env[63538]: DEBUG nova.network.neutron [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Updating instance_info_cache with network_info: [{"id": "d1b61459-5c9a-429e-aa08-d0d2d0c5c846", "address": "fa:16:3e:70:39:28", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1b61459-5c", "ovs_interfaceid": "d1b61459-5c9a-429e-aa08-d0d2d0c5c846", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.310277] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4286091-db48-4114-8322-88a90fc0e91e tempest-ServerRescueTestJSONUnderV235-1693910194 tempest-ServerRescueTestJSONUnderV235-1693910194-project-member] Lock "e447c109-4cef-4cc7-9acf-61abc0f47482" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.133s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.329933] env[63538]: DEBUG nova.network.neutron [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Updating instance_info_cache with network_info: [{"id": 
"67d24d11-311f-4e9b-90b8-20569417ffb1", "address": "fa:16:3e:17:74:6c", "network": {"id": "19a71225-10fa-49f2-ac67-af1873417755", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1401406561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4efc4733ea894fb7825e52b29ac8b6ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67d24d11-31", "ovs_interfaceid": "67d24d11-311f-4e9b-90b8-20569417ffb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.392297] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101326, 'name': ReconfigVM_Task, 'duration_secs': 0.790561} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.393033] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Reconfigured VM instance instance-00000048 to attach disk [datastore2] edcc5700-7b1e-494a-82d1-844373a9d5a6/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.395225] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6cbda4-8970-43d4-8017-5253250269b7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.436825] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7d7a33f-7774-4650-b46e-fe2455ca3818 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.460485] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101327, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.465034] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 966.465034] env[63538]: value = "task-5101328" [ 966.465034] env[63538]: _type = "Task" [ 966.465034] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.478156] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101328, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.677941] env[63538]: DEBUG nova.compute.manager [req-cda542e9-ff94-4b97-8d84-d53657a9223d req-8f945d63-57b3-4b22-8047-4110f27c9f98 service nova] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Received event network-changed-67d24d11-311f-4e9b-90b8-20569417ffb1 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 966.678894] env[63538]: DEBUG nova.compute.manager [req-cda542e9-ff94-4b97-8d84-d53657a9223d req-8f945d63-57b3-4b22-8047-4110f27c9f98 service nova] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Refreshing instance network info cache due to event network-changed-67d24d11-311f-4e9b-90b8-20569417ffb1. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 966.678894] env[63538]: DEBUG oslo_concurrency.lockutils [req-cda542e9-ff94-4b97-8d84-d53657a9223d req-8f945d63-57b3-4b22-8047-4110f27c9f98 service nova] Acquiring lock "refresh_cache-6bc30d96-8056-421c-875b-c24488e5f595" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.759509] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e9cbd8-90c5-4b86-bef2-880db18aaf91 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.765030] env[63538]: DEBUG oslo_concurrency.lockutils [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] Releasing lock "refresh_cache-cf72ac3d-4051-428a-b5bc-7f28accb13c0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.765287] env[63538]: DEBUG nova.compute.manager [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Received event network-vif-unplugged-5a3ccff0-6550-429c-a4ce-0afa4c25230f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 966.765503] env[63538]: DEBUG oslo_concurrency.lockutils [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] Acquiring lock "2e97b357-0200-4aed-9705-dd7808f853ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.765705] env[63538]: DEBUG oslo_concurrency.lockutils [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] Lock "2e97b357-0200-4aed-9705-dd7808f853ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.765867] env[63538]: DEBUG oslo_concurrency.lockutils [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] Lock "2e97b357-0200-4aed-9705-dd7808f853ba-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.766051] env[63538]: DEBUG nova.compute.manager [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] No waiting events found dispatching network-vif-unplugged-5a3ccff0-6550-429c-a4ce-0afa4c25230f {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 966.766265] env[63538]: WARNING nova.compute.manager [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Received unexpected event network-vif-unplugged-5a3ccff0-6550-429c-a4ce-0afa4c25230f for instance with vm_state shelved_offloaded and task_state None. [ 966.766404] env[63538]: DEBUG nova.compute.manager [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Received event network-changed-5a3ccff0-6550-429c-a4ce-0afa4c25230f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 966.766609] env[63538]: DEBUG nova.compute.manager [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Refreshing instance network info cache due to event network-changed-5a3ccff0-6550-429c-a4ce-0afa4c25230f. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 966.766712] env[63538]: DEBUG oslo_concurrency.lockutils [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] Acquiring lock "refresh_cache-2e97b357-0200-4aed-9705-dd7808f853ba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.766855] env[63538]: DEBUG oslo_concurrency.lockutils [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] Acquired lock "refresh_cache-2e97b357-0200-4aed-9705-dd7808f853ba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.767017] env[63538]: DEBUG nova.network.neutron [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Refreshing network info cache for port 5a3ccff0-6550-429c-a4ce-0afa4c25230f {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 966.774361] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570dd316-ee86-4330-82ad-f4117ea301be {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.807318] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3c204b-b317-4824-9a79-a54ecced2f71 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.817284] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eda52e8-dee4-4460-9746-69c2f8265725 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.834540] env[63538]: DEBUG nova.compute.provider_tree [None 
req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 966.839282] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Releasing lock "refresh_cache-6bc30d96-8056-421c-875b-c24488e5f595" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.839282] env[63538]: DEBUG nova.compute.manager [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Instance network_info: |[{"id": "67d24d11-311f-4e9b-90b8-20569417ffb1", "address": "fa:16:3e:17:74:6c", "network": {"id": "19a71225-10fa-49f2-ac67-af1873417755", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1401406561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4efc4733ea894fb7825e52b29ac8b6ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67d24d11-31", "ovs_interfaceid": "67d24d11-311f-4e9b-90b8-20569417ffb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 966.839282] env[63538]: DEBUG oslo_concurrency.lockutils [req-cda542e9-ff94-4b97-8d84-d53657a9223d req-8f945d63-57b3-4b22-8047-4110f27c9f98 service nova] Acquired lock "refresh_cache-6bc30d96-8056-421c-875b-c24488e5f595" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.839282] env[63538]: DEBUG nova.network.neutron [req-cda542e9-ff94-4b97-8d84-d53657a9223d req-8f945d63-57b3-4b22-8047-4110f27c9f98 service nova] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Refreshing network info cache for port 67d24d11-311f-4e9b-90b8-20569417ffb1 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 966.841147] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] 
[instance: 6bc30d96-8056-421c-875b-c24488e5f595] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:74:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d39252e-42ef-4252-98d3-62af5a0d109d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67d24d11-311f-4e9b-90b8-20569417ffb1', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 966.850023] env[63538]: DEBUG oslo.service.loopingcall [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.851274] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 966.852819] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f964b1a-0e73-4041-863d-fb8d0d8e97fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.875723] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 966.875723] env[63538]: value = "task-5101329" [ 966.875723] env[63538]: _type = "Task" [ 966.875723] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.887359] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101329, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.938495] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101327, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582062} completed successfully. 
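The VIF-info list above is a direct projection of the Neutron network_info entry: the bridge becomes network_name, the MAC becomes mac_address, the NSX logical-switch id becomes an OpaqueNetwork reference, and the port id becomes iface_id. A sketch of that mapping, grounded in the keys visible in both structures; the real mapping lives in the vmwareapi driver:

```python
# Sketch of the translation visible above from a Neutron network_info entry to
# the driver's VIF-info dict. Key names are taken from the logged structures;
# the function itself is illustrative, not the driver's implementation.
def vif_info_from_network_info(vif: dict, vif_model: str = "vmxnet3") -> dict:
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],          # e.g. "br-int"
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }
```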
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.938956] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] cf72ac3d-4051-428a-b5bc-7f28accb13c0/cf72ac3d-4051-428a-b5bc-7f28accb13c0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 966.939120] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 966.939351] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b659b8c-9310-4f55-b96c-99727715a3d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.946751] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 966.946751] env[63538]: value = "task-5101330" [ 966.946751] env[63538]: _type = "Task" [ 966.946751] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.960506] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101330, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.977724] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101328, 'name': ReconfigVM_Task, 'duration_secs': 0.430846} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.979662] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 966.979662] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c454419-43c5-4a57-8188-2397035f5e1a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.988628] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 966.988628] env[63538]: value = "task-5101331" [ 966.988628] env[63538]: _type = "Task" [ 966.988628] env[63538]: } to complete. 
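The "Extending root virtual disk to 1048576" entry above matches a 1 GiB root disk expressed in KiB (1024 * 1024 = 1,048,576); that the figure is in KiB is an inference from this match rather than something the log states:

```python
# The "Extending root virtual disk to 1048576" entry above is consistent with
# a 1 GiB root disk expressed in KiB. The KiB unit is an inference from that
# match, not something the log states explicitly.
def gib_to_kib(gib: int) -> int:
    return gib * 1024 * 1024


assert gib_to_kib(1) == 1048576
```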
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.004099] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101331, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.361495] env[63538]: ERROR nova.scheduler.client.report [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [req-bf78a74b-1735-465b-9942-26d1b716ff03] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bf78a74b-1735-465b-9942-26d1b716ff03"}]} [ 967.389404] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101329, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.390582] env[63538]: DEBUG nova.scheduler.client.report [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 967.419103] env[63538]: DEBUG nova.scheduler.client.report [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 967.419479] env[63538]: DEBUG nova.compute.provider_tree [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 967.433750] env[63538]: DEBUG nova.scheduler.client.report [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 967.459218] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101330, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087994} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.459468] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 967.460325] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a527ac-05a2-4617-8866-7f08bc8e5e64 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.465778] env[63538]: DEBUG nova.scheduler.client.report [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 967.469511] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b071c2-ed25-4471-a856-00208ebe2ed1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.492634] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] cf72ac3d-4051-428a-b5bc-7f28accb13c0/cf72ac3d-4051-428a-b5bc-7f28accb13c0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 967.503555] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a01c7b4e-be42-427d-882a-4d13c52b8ccd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.537242] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance '4ec5d3a2-8b29-4074-b323-f94704043b8b' progress to 0 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 967.549905] env[63538]: DEBUG oslo_vmware.api [None 
req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 967.549905] env[63538]: value = "task-5101332" [ 967.549905] env[63538]: _type = "Task" [ 967.549905] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.550134] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101331, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.766408] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "8097cb1c-bbba-45a8-be81-64d38decb1df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.766713] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "8097cb1c-bbba-45a8-be81-64d38decb1df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.768026] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "8097cb1c-bbba-45a8-be81-64d38decb1df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.768026] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "8097cb1c-bbba-45a8-be81-64d38decb1df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.768026] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "8097cb1c-bbba-45a8-be81-64d38decb1df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.770208] env[63538]: INFO nova.compute.manager [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Terminating instance [ 967.772673] env[63538]: DEBUG nova.compute.manager [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] 
[instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 967.772866] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 967.773960] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6329380-41ed-4dae-9d50-14e605e4b82b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.785380] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 967.785726] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17a7a76f-056c-40fc-8c85-a114d9784a5f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.793754] env[63538]: DEBUG oslo_vmware.api [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 967.793754] env[63538]: value = "task-5101333" [ 967.793754] env[63538]: _type = "Task" [ 967.793754] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.810536] env[63538]: DEBUG oslo_vmware.api [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101333, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.868032] env[63538]: DEBUG nova.network.neutron [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Updated VIF entry in instance network info cache for port 5a3ccff0-6550-429c-a4ce-0afa4c25230f. 
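The terminate flow above serializes work on the instance with oslo.concurrency locks (the "Acquiring lock ... acquired ... released" lines). A rough sketch of that pattern, with illustrative names rather than Nova's actual wrapper:

    from oslo_concurrency import lockutils

    instance_uuid = '8097cb1c-bbba-45a8-be81-64d38decb1df'

    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        # power off, unregister and delete the instance while holding the
        # per-instance lock, so concurrent operations stay serialized
        pass

    do_terminate_instance()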
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 967.868032] env[63538]: DEBUG nova.network.neutron [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Updating instance_info_cache with network_info: [{"id": "5a3ccff0-6550-429c-a4ce-0afa4c25230f", "address": "fa:16:3e:eb:33:f1", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": null, "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap5a3ccff0-65", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.890335] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101329, 'name': CreateVM_Task, 'duration_secs': 0.587251} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.890714] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 967.891558] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 967.891853] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.892289] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 967.892670] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-332ffedf-9f93-46c4-8007-920614622f70 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.900696] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 
tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 967.900696] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523c9e3e-6e5c-ce85-c077-0ee3cbe08b9a" [ 967.900696] env[63538]: _type = "Task" [ 967.900696] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.907422] env[63538]: DEBUG nova.network.neutron [req-cda542e9-ff94-4b97-8d84-d53657a9223d req-8f945d63-57b3-4b22-8047-4110f27c9f98 service nova] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Updated VIF entry in instance network info cache for port 67d24d11-311f-4e9b-90b8-20569417ffb1. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 967.908258] env[63538]: DEBUG nova.network.neutron [req-cda542e9-ff94-4b97-8d84-d53657a9223d req-8f945d63-57b3-4b22-8047-4110f27c9f98 service nova] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Updating instance_info_cache with network_info: [{"id": "67d24d11-311f-4e9b-90b8-20569417ffb1", "address": "fa:16:3e:17:74:6c", "network": {"id": "19a71225-10fa-49f2-ac67-af1873417755", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1401406561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4efc4733ea894fb7825e52b29ac8b6ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67d24d11-31", "ovs_interfaceid": "67d24d11-311f-4e9b-90b8-20569417ffb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.913485] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523c9e3e-6e5c-ce85-c077-0ee3cbe08b9a, 'name': SearchDatastore_Task} progress is 0%. 
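The instance_info_cache payload logged above is plain JSON, so its interesting bits (port ID, MAC, fixed IPs) can be pulled out directly. A small illustrative helper, not Nova code, using a sample trimmed from that log entry:

    import json

    cached = '''[{"id": "67d24d11-311f-4e9b-90b8-20569417ffb1",
                  "address": "fa:16:3e:17:74:6c",
                  "network": {"subnets": [{"cidr": "192.168.128.0/28",
                                           "ips": [{"address": "192.168.128.14",
                                                    "type": "fixed"}]}]}}]'''

    for vif in json.loads(cached):
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        print(vif['id'], vif['address'], ips)   # port, MAC, fixed IPs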
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.920176] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b97892-ee7e-4c34-a97b-50d158b4d072 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.929999] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e162a098-d409-438a-9f2d-4953a57f252a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.969985] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9780408b-4c3e-4ee4-a4e9-0ba3f8283d2a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.981264] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383c439a-ee58-44c6-a198-0325cf06ff99 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.003676] env[63538]: DEBUG nova.compute.provider_tree [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 968.015431] env[63538]: DEBUG oslo_vmware.api [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101331, 'name': PowerOnVM_Task, 'duration_secs': 0.696618} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.015431] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 968.019599] env[63538]: DEBUG nova.compute.manager [None req-ec1efab1-cac8-4511-afed-634cd39bcfee tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 968.020490] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d2f380-3d06-40c3-9a71-240410bf67c1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.042714] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 968.043789] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf98079b-20be-4a4e-b5f2-d79f3787615e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.055987] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 968.055987] env[63538]: value = "task-5101334" [ 968.055987] env[63538]: _type = "Task" [ 968.055987] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.064534] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101332, 'name': ReconfigVM_Task, 'duration_secs': 0.354969} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.065770] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Reconfigured VM instance instance-00000053 to attach disk [datastore2] cf72ac3d-4051-428a-b5bc-7f28accb13c0/cf72ac3d-4051-428a-b5bc-7f28accb13c0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 968.066159] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-551e5e3d-6476-43a6-b17f-b9eeeaf6a494 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.071521] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101334, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.078241] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 968.078241] env[63538]: value = "task-5101335" [ 968.078241] env[63538]: _type = "Task" [ 968.078241] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.091278] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101335, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.308657] env[63538]: DEBUG oslo_vmware.api [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101333, 'name': PowerOffVM_Task, 'duration_secs': 0.376782} completed successfully. 
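The "Reconfigured VM instance ... to attach disk" step above is a ReconfigVM_Task whose spec adds a VirtualDisk device. A hedged sketch of what such a spec roughly looks like through the suds type factory; it is a simplified stand-in (flat backing, hard-coded keys), not Nova's attach_disk_to_vm, and session, vm_ref and controller_key are placeholders:

    cf = session.vim.client.factory          # suds type factory
    vm_ref = ...                             # VM moref, looked up elsewhere
    controller_key = ...                     # key of an existing SCSI controller

    backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.fileName = ('[datastore2] cf72ac3d-4051-428a-b5bc-7f28accb13c0/'
                        'cf72ac3d-4051-428a-b5bc-7f28accb13c0.vmdk')
    backing.diskMode = 'persistent'

    disk = cf.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = controller_key
    disk.unitNumber = 0
    disk.key = -100                          # negative key = assign on add

    change = cf.create('ns0:VirtualDeviceConfigSpec')
    change.operation = 'add'
    change.device = disk

    spec = cf.create('ns0:VirtualMachineConfigSpec')
    spec.deviceChange = [change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
    session.wait_for_task(task)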
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.308872] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 968.309065] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 968.309399] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ae2915f-6d88-4971-8440-43f197fce9fc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.361108] env[63538]: DEBUG oslo_vmware.rw_handles [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b68626-60fe-67bd-25da-5d3a0f30f0a1/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 968.362266] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e051d36e-606c-4c54-9b42-890868f21bb8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.369769] env[63538]: DEBUG oslo_vmware.rw_handles [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b68626-60fe-67bd-25da-5d3a0f30f0a1/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 968.369932] env[63538]: ERROR oslo_vmware.rw_handles [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b68626-60fe-67bd-25da-5d3a0f30f0a1/disk-0.vmdk due to incomplete transfer. [ 968.370186] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2afecb65-22cd-4666-bd64-19a668e8ab7e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.375166] env[63538]: DEBUG oslo_concurrency.lockutils [req-9f7889ca-204d-4882-b64b-6b8d029d9834 req-ff1ca148-5164-4efd-acc1-9a52537ab894 service nova] Releasing lock "refresh_cache-2e97b357-0200-4aed-9705-dd7808f853ba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.379979] env[63538]: DEBUG oslo_vmware.rw_handles [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b68626-60fe-67bd-25da-5d3a0f30f0a1/disk-0.vmdk. 
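The lease handling above (state check, then "Aborting lease ... due to incomplete transfer") follows the usual HttpNfcLease protocol: a finished export is completed, anything else is aborted so the export does not leak. A rough sketch under those assumptions, with lease and transfer_complete as placeholders; this is not oslo.vmware's exact code:

    from oslo_vmware import vim_util

    lease = ...                 # HttpNfcLease moref returned by the export call
    transfer_complete = False   # whether the VMDK stream finished cleanly

    state = session.invoke_api(vim_util, 'get_object_property',
                               session.vim, lease, 'state')
    if state == 'ready' and transfer_complete:
        session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
    else:
        # the branch logged above: abort so the export is not left dangling
        session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease)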
{{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 968.380201] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Uploaded image 6185ea79-9c71-4180-85df-f64f05052bed to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 968.383132] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 968.386017] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-17eab840-81b0-4bb2-9034-0f0134cab4a4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.393548] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 968.393548] env[63538]: value = "task-5101337" [ 968.393548] env[63538]: _type = "Task" [ 968.393548] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.403075] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101337, 'name': Destroy_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.408504] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 968.409614] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 968.409614] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Deleting the datastore file [datastore2] 8097cb1c-bbba-45a8-be81-64d38decb1df {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 968.409614] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d7e8911-99c5-4674-98ca-5bd5054155aa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.415354] env[63538]: DEBUG oslo_concurrency.lockutils [req-cda542e9-ff94-4b97-8d84-d53657a9223d req-8f945d63-57b3-4b22-8047-4110f27c9f98 service nova] Releasing lock "refresh_cache-6bc30d96-8056-421c-875b-c24488e5f595" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.415813] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523c9e3e-6e5c-ce85-c077-0ee3cbe08b9a, 'name': SearchDatastore_Task, 'duration_secs': 0.013625} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.416592] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.417052] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 968.417172] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.417277] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.417496] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 968.417697] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b8e194c-0790-4290-ab4f-a59ef368863e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.421331] env[63538]: DEBUG oslo_vmware.api [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 968.421331] env[63538]: value = "task-5101338" [ 968.421331] env[63538]: _type = "Task" [ 968.421331] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.431401] env[63538]: DEBUG oslo_vmware.api [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101338, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.432630] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 968.432887] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 968.433759] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc8775b2-7145-4b76-81d4-f9c348d81777 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.439871] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 968.439871] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52397df6-750f-b936-f5eb-19cf6616df93" [ 968.439871] env[63538]: _type = "Task" [ 968.439871] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.448672] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52397df6-750f-b936-f5eb-19cf6616df93, 'name': SearchDatastore_Task} progress is 0%. 
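The "Creating directory ... Created directory" pair above comes from building the image-cache folder with FileManager.MakeDirectory; a pre-existing folder is tolerated. A minimal sketch, with session and dc_ref as placeholders:

    from oslo_vmware import exceptions as vexc

    file_manager = session.vim.service_content.fileManager
    dc_ref = ...   # Datacenter moref, looked up elsewhere

    try:
        session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                           name='[datastore2] devstack-image-cache_base',
                           datacenter=dc_ref,
                           createParentDirectories=True)
    except vexc.FileAlreadyExistsException:
        pass   # another worker created the cache folder first; that is fine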
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.549619] env[63538]: DEBUG nova.scheduler.client.report [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 113 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 968.549942] env[63538]: DEBUG nova.compute.provider_tree [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 113 to 114 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 968.550107] env[63538]: DEBUG nova.compute.provider_tree [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 968.567210] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101334, 'name': PowerOffVM_Task, 'duration_secs': 0.239598} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.567344] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 968.567582] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance '4ec5d3a2-8b29-4074-b323-f94704043b8b' progress to 17 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 968.589957] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101335, 'name': Rename_Task, 'duration_secs': 0.228293} completed successfully. 
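The earlier 409 ("placement.concurrent_update") and the generation bump from 113 to 114 above reflect Placement's optimistic concurrency: every inventory PUT carries the provider generation, and a stale generation forces a refresh and retry. An illustrative sketch of that exchange over the REST API, with a placeholder endpoint and token rather than Nova's report client:

    import requests

    PLACEMENT = 'http://placement.example.test'           # placeholder
    HEADERS = {'x-auth-token': 'TOKEN',                    # placeholder
               'openstack-api-version': 'placement 1.26'}
    RP = 'f65218a4-1d3d-476a-9093-01cae92c8635'
    INV = {'VCPU': {'total': 48, 'allocation_ratio': 4.0}}

    def put_inventory(generation):
        return requests.put(
            f'{PLACEMENT}/resource_providers/{RP}/inventories',
            json={'resource_provider_generation': generation,
                  'inventories': INV},
            headers=HEADERS)

    resp = put_inventory(113)
    if resp.status_code == 409 and 'placement.concurrent_update' in resp.text:
        # someone else bumped the generation; fetch the current one and retry
        current = requests.get(
            f'{PLACEMENT}/resource_providers/{RP}/inventories',
            headers=HEADERS).json()
        resp = put_inventory(current['resource_provider_generation'])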
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.590447] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 968.590583] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d28f78c8-7d32-4381-839c-1fcb78b76e72 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.597815] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 968.597815] env[63538]: value = "task-5101339" [ 968.597815] env[63538]: _type = "Task" [ 968.597815] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.613999] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101339, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.709931] env[63538]: DEBUG nova.compute.manager [req-c566edd5-c35a-4b6f-8e84-6e7d80325197 req-e7fad1cf-3673-4871-b29c-9da2d7785bcd service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Received event network-changed-da4e2c60-afe9-437a-ba0d-55b9b358ae8f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 968.709931] env[63538]: DEBUG nova.compute.manager [req-c566edd5-c35a-4b6f-8e84-6e7d80325197 req-e7fad1cf-3673-4871-b29c-9da2d7785bcd service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Refreshing instance network info cache due to event network-changed-da4e2c60-afe9-437a-ba0d-55b9b358ae8f. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 968.709931] env[63538]: DEBUG oslo_concurrency.lockutils [req-c566edd5-c35a-4b6f-8e84-6e7d80325197 req-e7fad1cf-3673-4871-b29c-9da2d7785bcd service nova] Acquiring lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.709931] env[63538]: DEBUG oslo_concurrency.lockutils [req-c566edd5-c35a-4b6f-8e84-6e7d80325197 req-e7fad1cf-3673-4871-b29c-9da2d7785bcd service nova] Acquired lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.709931] env[63538]: DEBUG nova.network.neutron [req-c566edd5-c35a-4b6f-8e84-6e7d80325197 req-e7fad1cf-3673-4871-b29c-9da2d7785bcd service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Refreshing network info cache for port da4e2c60-afe9-437a-ba0d-55b9b358ae8f {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 968.909120] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101337, 'name': Destroy_Task} progress is 33%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.931837] env[63538]: DEBUG oslo_vmware.api [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185289} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.932008] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 968.932220] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 968.932404] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 968.932587] env[63538]: INFO nova.compute.manager [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Took 1.16 seconds to destroy the instance on the hypervisor. [ 968.932849] env[63538]: DEBUG oslo.service.loopingcall [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 968.933127] env[63538]: DEBUG nova.compute.manager [-] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 968.933294] env[63538]: DEBUG nova.network.neutron [-] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 968.952706] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52397df6-750f-b936-f5eb-19cf6616df93, 'name': SearchDatastore_Task, 'duration_secs': 0.029159} completed successfully. 
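The "Waiting for function ..._deallocate_network_with_retries to return" line above is oslo.service's retry wrapper around the Neutron cleanup. A hedged sketch of that pattern; the retry counts and the exception type here are illustrative, not Nova's exact values:

    from oslo_service import loopingcall

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=12,
                                exceptions=(ConnectionError,))
    def deallocate_network_with_retries():
        # call Neutron to unbind and delete the instance's ports; transient
        # connection failures are retried with an increasing sleep
        pass

    deallocate_network_with_retries()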
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.953601] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17f5b622-1bff-4e1d-994d-28888f4c01b2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.961072] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 968.961072] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526ac5b0-9fe1-6ac0-73bf-39970fdf952d" [ 968.961072] env[63538]: _type = "Task" [ 968.961072] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.973097] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526ac5b0-9fe1-6ac0-73bf-39970fdf952d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.025029] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 969.025029] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 969.058347] env[63538]: DEBUG oslo_concurrency.lockutils [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.284s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.059557] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.987s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.059825] env[63538]: DEBUG nova.objects.instance [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'resources' on Instance uuid d5d557c6-3d4e-4122-8756-218c9757fa01 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.075216] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-12-12T12:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 969.075216] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 969.075216] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.075489] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 969.075540] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.075696] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 969.075903] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 969.076075] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 969.076261] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 969.076452] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] 
Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 969.076658] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 969.084498] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-267caf7a-bb3e-4005-9d79-8e40c5776127 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.097269] env[63538]: INFO nova.scheduler.client.report [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Deleted allocations for instance 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a [ 969.114429] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 969.114429] env[63538]: value = "task-5101340" [ 969.114429] env[63538]: _type = "Task" [ 969.114429] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.118529] env[63538]: DEBUG oslo_vmware.api [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101339, 'name': PowerOnVM_Task, 'duration_secs': 0.498854} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.122801] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 969.123173] env[63538]: INFO nova.compute.manager [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Took 9.27 seconds to spawn the instance on the hypervisor. [ 969.123722] env[63538]: DEBUG nova.compute.manager [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 969.125526] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a967b5-4c1b-4328-89d0-94d12a70301a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.144881] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101340, 'name': ReconfigVM_Task} progress is 6%. 
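The topology walk above (flavor/image limits, "Build topologies for 1 vcpu(s) 1:1:1", one possible topology) enumerates every sockets*cores*threads split of the vCPU count that fits the maxima. A simplified, illustrative version of that enumeration, not nova.virt.hardware itself:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        found = []
        for s in range(1, min(max_sockets, vcpus) + 1):
            for c in range(1, min(max_cores, vcpus) + 1):
                for t in range(1, min(max_threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        found.append((s, c, t))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)], as in the log above
    print(possible_topologies(4))   # (1, 1, 4), (1, 2, 2), (1, 4, 1), ...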
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.408245] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101337, 'name': Destroy_Task, 'duration_secs': 0.733941} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.408603] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Destroyed the VM [ 969.408603] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 969.408839] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-782aeea6-0c1a-4ec4-b670-367bbc96efcf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.417489] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 969.417489] env[63538]: value = "task-5101341" [ 969.417489] env[63538]: _type = "Task" [ 969.417489] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.427508] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101341, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.474790] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526ac5b0-9fe1-6ac0-73bf-39970fdf952d, 'name': SearchDatastore_Task, 'duration_secs': 0.010927} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.475754] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.475754] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 6bc30d96-8056-421c-875b-c24488e5f595/6bc30d96-8056-421c-875b-c24488e5f595.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 969.476297] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-295612e7-81e7-4ff2-a033-4a464e45ba73 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.484710] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 969.484710] env[63538]: value = "task-5101342" [ 969.484710] env[63538]: _type = "Task" [ 969.484710] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.494235] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101342, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.540867] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 969.540867] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Starting heal instance info cache {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 969.625556] env[63538]: DEBUG oslo_concurrency.lockutils [None req-093baea4-c7e7-483e-9b20-02b66b139e47 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.851s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.636016] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101340, 'name': ReconfigVM_Task, 'duration_secs': 0.428258} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.637437] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance '4ec5d3a2-8b29-4074-b323-f94704043b8b' progress to 33 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 969.660228] env[63538]: INFO nova.compute.manager [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Took 35.01 seconds to build instance. [ 969.929342] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101341, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.959288] env[63538]: INFO nova.compute.manager [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Unrescuing [ 969.959640] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.959894] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquired lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.960140] env[63538]: DEBUG nova.network.neutron [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 970.006425] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101342, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.042938] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928b09a9-ccca-4b38-9e76-70821f67a9fb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.055492] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ecdfaf-d968-4ebb-ac1c-33c6cc86defa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.088494] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78a0761-c889-4281-9355-e213bde20660 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.100447] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4df349-ca42-4b6a-a5fc-61e0560fc07f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.118695] env[63538]: DEBUG nova.compute.provider_tree [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.143682] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 970.143944] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 970.144508] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 970.144508] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 970.144508] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 
tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 970.144697] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 970.144842] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 970.144960] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 970.145494] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 970.145627] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 970.145799] env[63538]: DEBUG nova.virt.hardware [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 970.152115] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Reconfiguring VM instance instance-0000004d to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 970.156100] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d4a2778-c20b-4ed9-9aa1-907870faa49b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.170193] env[63538]: DEBUG oslo_concurrency.lockutils [None req-48718269-fc08-4557-9e60-d10809ca6e1d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "cf72ac3d-4051-428a-b5bc-7f28accb13c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.541s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.180031] env[63538]: DEBUG oslo_vmware.api [None 
req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 970.180031] env[63538]: value = "task-5101343" [ 970.180031] env[63538]: _type = "Task" [ 970.180031] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.189644] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101343, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.332040] env[63538]: DEBUG nova.network.neutron [req-c566edd5-c35a-4b6f-8e84-6e7d80325197 req-e7fad1cf-3673-4871-b29c-9da2d7785bcd service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Updated VIF entry in instance network info cache for port da4e2c60-afe9-437a-ba0d-55b9b358ae8f. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 970.333427] env[63538]: DEBUG nova.network.neutron [req-c566edd5-c35a-4b6f-8e84-6e7d80325197 req-e7fad1cf-3673-4871-b29c-9da2d7785bcd service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Updating instance_info_cache with network_info: [{"id": "da4e2c60-afe9-437a-ba0d-55b9b358ae8f", "address": "fa:16:3e:8a:cb:bc", "network": {"id": "8497b0d5-d275-4479-ae83-3ef4a1bb795b", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-849919494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f6e933bf6c4e71af3b2a1e02d6e42f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4e2c60-af", "ovs_interfaceid": "da4e2c60-afe9-437a-ba0d-55b9b358ae8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.430793] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101341, 'name': RemoveSnapshot_Task, 'duration_secs': 0.541172} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.431506] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 970.431958] env[63538]: DEBUG nova.compute.manager [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 970.433913] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0235c358-35c5-4a71-9860-69a03a1d3158 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.498739] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101342, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563125} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.500230] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 6bc30d96-8056-421c-875b-c24488e5f595/6bc30d96-8056-421c-875b-c24488e5f595.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 970.500514] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 970.505612] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-212823bf-a3ef-494f-8b36-f50521f681e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.515744] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 970.515744] env[63538]: value = "task-5101344" [ 970.515744] env[63538]: _type = "Task" [ 970.515744] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.530176] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101344, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.623726] env[63538]: DEBUG nova.scheduler.client.report [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 970.690533] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101343, 'name': ReconfigVM_Task, 'duration_secs': 0.41193} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.690816] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Reconfigured VM instance instance-0000004d to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 970.691661] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7805e647-c552-48a4-abce-7ec52af68f69 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.709330] env[63538]: DEBUG nova.network.neutron [-] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.721019] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 4ec5d3a2-8b29-4074-b323-f94704043b8b/4ec5d3a2-8b29-4074-b323-f94704043b8b.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 970.721463] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09c9bdd2-176e-4a09-9d40-a0a89ba9caff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.748700] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 970.748700] env[63538]: value = "task-5101345" [ 970.748700] env[63538]: _type = "Task" [ 970.748700] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.760307] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101345, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.840597] env[63538]: DEBUG oslo_concurrency.lockutils [req-c566edd5-c35a-4b6f-8e84-6e7d80325197 req-e7fad1cf-3673-4871-b29c-9da2d7785bcd service nova] Releasing lock "refresh_cache-8097cb1c-bbba-45a8-be81-64d38decb1df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.900046] env[63538]: DEBUG nova.compute.manager [req-6360f83f-b88b-41fb-be79-78c5171aa904 req-36a54fc3-c9c5-4974-b9d6-a1156f2dc598 service nova] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Received event network-vif-deleted-da4e2c60-afe9-437a-ba0d-55b9b358ae8f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 970.950615] env[63538]: INFO nova.compute.manager [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Shelve offloading [ 970.953348] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 970.953748] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7999b30f-a89b-4464-b893-6ce861a1cdcd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.962569] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 970.962569] env[63538]: value = "task-5101346" [ 970.962569] env[63538]: _type = "Task" [ 970.962569] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.976321] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 970.976583] env[63538]: DEBUG nova.compute.manager [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 970.977506] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a2f958-79a8-4e1f-810f-40f59f8e0343 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.985247] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.985555] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.985713] env[63538]: DEBUG nova.network.neutron [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 971.008548] env[63538]: DEBUG oslo_concurrency.lockutils [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "cf72ac3d-4051-428a-b5bc-7f28accb13c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.009451] env[63538]: DEBUG oslo_concurrency.lockutils [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "cf72ac3d-4051-428a-b5bc-7f28accb13c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.009810] env[63538]: DEBUG oslo_concurrency.lockutils [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "cf72ac3d-4051-428a-b5bc-7f28accb13c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.010100] env[63538]: DEBUG oslo_concurrency.lockutils [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "cf72ac3d-4051-428a-b5bc-7f28accb13c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.010357] env[63538]: DEBUG oslo_concurrency.lockutils [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "cf72ac3d-4051-428a-b5bc-7f28accb13c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.012714] env[63538]: INFO nova.compute.manager [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Terminating instance [ 971.016208] env[63538]: DEBUG nova.compute.manager [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 971.016618] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 971.018329] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86f1bdc-87a7-460b-a311-f3b12ac7772d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.030245] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 971.034112] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-016b9d45-b9ca-4a4e-999c-0c972301fa9e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.036305] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101344, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.365031} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.037066] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 971.038467] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ec79f3-d23e-4aec-a4e1-b541a9537000 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.043883] env[63538]: DEBUG oslo_vmware.api [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 971.043883] env[63538]: value = "task-5101347" [ 971.043883] env[63538]: _type = "Task" [ 971.043883] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.077709] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 6bc30d96-8056-421c-875b-c24488e5f595/6bc30d96-8056-421c-875b-c24488e5f595.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 971.085552] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca04e874-6cbc-4135-abe9-d154b0f7364a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.104202] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "376ee3d9-e8b5-4f47-9622-b873126b492e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.104676] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "376ee3d9-e8b5-4f47-9622-b873126b492e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.104937] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "376ee3d9-e8b5-4f47-9622-b873126b492e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.105157] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 
tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "376ee3d9-e8b5-4f47-9622-b873126b492e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.105365] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "376ee3d9-e8b5-4f47-9622-b873126b492e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.109094] env[63538]: DEBUG oslo_vmware.api [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101347, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.109094] env[63538]: INFO nova.compute.manager [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Terminating instance [ 971.113744] env[63538]: DEBUG nova.compute.manager [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 971.113744] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 971.114586] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e060652-cab6-4e2b-9c2a-6c4b73a22045 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.122962] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 971.122962] env[63538]: value = "task-5101348" [ 971.122962] env[63538]: _type = "Task" [ 971.122962] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.128186] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 971.128851] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78a75c9b-88b8-41e0-8fad-d021301a4d4c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.131081] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.072s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.141406] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.122s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.141406] env[63538]: DEBUG nova.objects.instance [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lazy-loading 'resources' on Instance uuid 2e1b0bc7-3909-48e2-b9be-26822a57ee67 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 971.146445] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101348, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.151484] env[63538]: DEBUG oslo_vmware.api [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 971.151484] env[63538]: value = "task-5101349" [ 971.151484] env[63538]: _type = "Task" [ 971.151484] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.163332] env[63538]: DEBUG oslo_vmware.api [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101349, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.168801] env[63538]: INFO nova.scheduler.client.report [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Deleted allocations for instance d5d557c6-3d4e-4122-8756-218c9757fa01 [ 971.210727] env[63538]: DEBUG nova.network.neutron [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Updating instance_info_cache with network_info: [{"id": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "address": "fa:16:3e:2d:31:f0", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff0fa7b-e0", "ovs_interfaceid": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.224116] env[63538]: INFO nova.compute.manager [-] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Took 2.29 seconds to deallocate network for instance. [ 971.260792] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101345, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.555441] env[63538]: DEBUG oslo_vmware.api [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101347, 'name': PowerOffVM_Task, 'duration_secs': 0.480423} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.555749] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 971.555905] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 971.556178] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4affb4e8-1852-4ef1-9d1b-ca97de5efde3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.631552] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 971.631790] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 971.631986] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleting the datastore file [datastore2] cf72ac3d-4051-428a-b5bc-7f28accb13c0 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 971.636025] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3eaaf6f4-1a86-4c4e-b41f-fbd6c5531ee4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.638500] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101348, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.643022] env[63538]: DEBUG oslo_vmware.api [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 971.643022] env[63538]: value = "task-5101351" [ 971.643022] env[63538]: _type = "Task" [ 971.643022] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.651476] env[63538]: DEBUG oslo_vmware.api [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101351, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.662486] env[63538]: DEBUG oslo_vmware.api [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101349, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.677038] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6567ffff-a83b-4afb-9014-864cf5d82f78 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "d5d557c6-3d4e-4122-8756-218c9757fa01" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.230s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.715520] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Releasing lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.716663] env[63538]: DEBUG nova.objects.instance [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lazy-loading 'flavor' on Instance uuid edcc5700-7b1e-494a-82d1-844373a9d5a6 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 971.734082] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.763772] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101345, 'name': ReconfigVM_Task, 'duration_secs': 0.800808} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.763896] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 4ec5d3a2-8b29-4074-b323-f94704043b8b/4ec5d3a2-8b29-4074-b323-f94704043b8b.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 971.764104] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance '4ec5d3a2-8b29-4074-b323-f94704043b8b' progress to 50 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 972.002069] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82d31de-1d98-4b6d-a674-9453d7cc1b75 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.013069] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2647ced-13aa-42cc-b765-413e30f422c3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.052722] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a0b3ce-186a-48bb-947b-c19a505f2edb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.062541] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404bd60f-d3ac-4800-8f9b-2da8d1db7d96 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.085427] env[63538]: DEBUG nova.compute.provider_tree [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.134810] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101348, 'name': ReconfigVM_Task, 'duration_secs': 0.612736} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.135566] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 6bc30d96-8056-421c-875b-c24488e5f595/6bc30d96-8056-421c-875b-c24488e5f595.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 972.136757] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-086c7f43-2599-45cc-ac93-53a997de0f76 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.145701] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 972.145701] env[63538]: value = "task-5101352" [ 972.145701] env[63538]: _type = "Task" [ 972.145701] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.153854] env[63538]: DEBUG oslo_vmware.api [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101351, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194031} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.157720] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 972.158326] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 972.158659] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 972.158957] env[63538]: INFO nova.compute.manager [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 972.159329] env[63538]: DEBUG oslo.service.loopingcall [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 972.166060] env[63538]: DEBUG nova.compute.manager [-] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 972.166060] env[63538]: DEBUG nova.network.neutron [-] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 972.166060] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101352, 'name': Rename_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.172308] env[63538]: DEBUG oslo_vmware.api [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101349, 'name': PowerOffVM_Task, 'duration_secs': 0.565572} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.172308] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 972.172308] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 972.172308] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-099f78aa-37c2-4b3f-868a-4ee4003b974a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.220314] env[63538]: DEBUG nova.network.neutron [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Updating instance_info_cache with network_info: [{"id": "8a332a90-393f-41ae-a924-4959c06e6207", "address": "fa:16:3e:37:0e:9d", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": 
"nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a332a90-39", "ovs_interfaceid": "8a332a90-393f-41ae-a924-4959c06e6207", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.230490] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3ca565-945c-417b-89d1-6823dc539c35 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.267373] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 972.270697] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-981acc97-c5e4-40a1-837c-0b5f370562eb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.276339] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 972.276339] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 972.276339] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Deleting the datastore file [datastore1] 376ee3d9-e8b5-4f47-9622-b873126b492e {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 972.277978] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04015bf-abcf-418f-a4df-89d57a43ff18 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.280882] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67fbaa6e-5ffa-4dd3-a65e-fcfa13aead27 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.305888] env[63538]: DEBUG oslo_vmware.api [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for the task: (returnval){ [ 972.305888] env[63538]: value = "task-5101355" [ 972.305888] env[63538]: _type = "Task" [ 972.305888] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.308783] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e581d5-4183-40a2-8f6f-572b7009890e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.311620] env[63538]: DEBUG oslo_vmware.api [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 972.311620] env[63538]: value = "task-5101354" [ 972.311620] env[63538]: _type = "Task" [ 972.311620] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.334899] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance '4ec5d3a2-8b29-4074-b323-f94704043b8b' progress to 67 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 972.343389] env[63538]: DEBUG oslo_vmware.api [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.349175] env[63538]: DEBUG oslo_vmware.api [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101354, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.591290] env[63538]: DEBUG nova.scheduler.client.report [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 972.658872] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101352, 'name': Rename_Task, 'duration_secs': 0.296426} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.659387] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 972.659874] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e7e63bc-fc2a-4923-afde-fcf72013d473 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.669074] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 972.669074] env[63538]: value = "task-5101356" [ 972.669074] env[63538]: _type = "Task" [ 972.669074] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.688254] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101356, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.728567] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.823343] env[63538]: DEBUG oslo_vmware.api [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Task: {'id': task-5101355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172626} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.827146] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 972.827372] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 972.827834] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 972.828107] env[63538]: INFO nova.compute.manager [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Took 1.71 seconds to destroy the instance on the hypervisor. [ 972.828369] env[63538]: DEBUG oslo.service.loopingcall [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 972.828596] env[63538]: DEBUG nova.compute.manager [-] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 972.828693] env[63538]: DEBUG nova.network.neutron [-] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 972.839328] env[63538]: DEBUG oslo_vmware.api [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101354, 'name': PowerOffVM_Task, 'duration_secs': 0.238322} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.839853] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 972.845325] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Reconfiguring VM instance instance-00000048 to detach disk 2002 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 972.848931] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3da142f8-8aac-439c-a326-8d3e30e2e9eb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.871296] env[63538]: DEBUG oslo_vmware.api [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 972.871296] env[63538]: value = "task-5101357" [ 972.871296] env[63538]: _type = "Task" [ 972.871296] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.882239] env[63538]: DEBUG oslo_vmware.api [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101357, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.964080] env[63538]: DEBUG nova.network.neutron [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Port 31cf3b33-b97d-4183-a21c-80e24e27351f binding to destination host cpu-1 is already ACTIVE {{(pid=63538) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 973.098745] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.962s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.106022] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.280s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.108342] env[63538]: INFO nova.compute.claims [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 973.137414] env[63538]: INFO nova.scheduler.client.report [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Deleted allocations for instance 2e1b0bc7-3909-48e2-b9be-26822a57ee67 [ 973.186608] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101356, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.286539] env[63538]: DEBUG nova.compute.manager [req-0246348f-0113-464e-afb4-a1f2a6fda041 req-55498d1b-806a-49e0-9623-5955b8a8b3e1 service nova] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Received event network-vif-deleted-d1b61459-5c9a-429e-aa08-d0d2d0c5c846 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 973.286747] env[63538]: INFO nova.compute.manager [req-0246348f-0113-464e-afb4-a1f2a6fda041 req-55498d1b-806a-49e0-9623-5955b8a8b3e1 service nova] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Neutron deleted interface d1b61459-5c9a-429e-aa08-d0d2d0c5c846; detaching it from the instance and deleting it from the info cache [ 973.286921] env[63538]: DEBUG nova.network.neutron [req-0246348f-0113-464e-afb4-a1f2a6fda041 req-55498d1b-806a-49e0-9623-5955b8a8b3e1 service nova] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.395321] env[63538]: DEBUG oslo_vmware.api [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101357, 'name': ReconfigVM_Task, 'duration_secs': 0.293084} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.395821] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Reconfigured VM instance instance-00000048 to detach disk 2002 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 973.395821] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 973.396122] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e29d07ce-2054-4212-b5eb-a999944e5821 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.404848] env[63538]: DEBUG oslo_vmware.api [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 973.404848] env[63538]: value = "task-5101358" [ 973.404848] env[63538]: _type = "Task" [ 973.404848] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.418802] env[63538]: DEBUG oslo_vmware.api [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101358, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.565620] env[63538]: DEBUG nova.compute.manager [req-11400151-9b52-4347-a91f-2b965d295160 req-336888e7-a6de-4fb9-b5af-773811a1c8f5 service nova] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Received event network-vif-deleted-5487fc0a-b645-4a02-b47f-772c0eabd9d4 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 973.565748] env[63538]: INFO nova.compute.manager [req-11400151-9b52-4347-a91f-2b965d295160 req-336888e7-a6de-4fb9-b5af-773811a1c8f5 service nova] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Neutron deleted interface 5487fc0a-b645-4a02-b47f-772c0eabd9d4; detaching it from the instance and deleting it from the info cache [ 973.565932] env[63538]: DEBUG nova.network.neutron [req-11400151-9b52-4347-a91f-2b965d295160 req-336888e7-a6de-4fb9-b5af-773811a1c8f5 service nova] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.649496] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce4b0631-9555-4ad5-a579-4dabdfaf2fac tempest-MigrationsAdminTest-713495968 tempest-MigrationsAdminTest-713495968-project-member] Lock "2e1b0bc7-3909-48e2-b9be-26822a57ee67" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.822s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.686149] env[63538]: DEBUG oslo_vmware.api [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101356, 'name': PowerOnVM_Task, 'duration_secs': 0.588823} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.686149] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 973.686149] env[63538]: INFO nova.compute.manager [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Took 11.25 seconds to spawn the instance on the hypervisor. 
[ 973.686149] env[63538]: DEBUG nova.compute.manager [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 973.686149] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791f059f-9961-4aeb-9175-27057b3a98a6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.697379] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "refresh_cache-ede967c0-ec3a-4f26-8290-0ee36890cd75" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.697525] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquired lock "refresh_cache-ede967c0-ec3a-4f26-8290-0ee36890cd75" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.697651] env[63538]: DEBUG nova.network.neutron [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Forcefully refreshing network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 973.747989] env[63538]: DEBUG nova.network.neutron [-] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.792680] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-718febfa-0e5c-452b-848e-be11f46cba44 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.806782] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd9316a-2056-4432-8f23-88e762985c55 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.866114] env[63538]: DEBUG nova.compute.manager [req-0246348f-0113-464e-afb4-a1f2a6fda041 req-55498d1b-806a-49e0-9623-5955b8a8b3e1 service nova] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Detach interface failed, port_id=d1b61459-5c9a-429e-aa08-d0d2d0c5c846, reason: Instance cf72ac3d-4051-428a-b5bc-7f28accb13c0 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 973.889954] env[63538]: DEBUG nova.network.neutron [-] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.923833] env[63538]: DEBUG oslo_vmware.api [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101358, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.989860] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "4ec5d3a2-8b29-4074-b323-f94704043b8b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.990216] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "4ec5d3a2-8b29-4074-b323-f94704043b8b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.990426] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "4ec5d3a2-8b29-4074-b323-f94704043b8b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.001800] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 974.003104] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b93e21-dee8-4239-8264-c41740a0a28f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.016522] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 974.018348] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-750680fe-a68b-4826-b0ea-566b9ab41d41 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.075471] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92d3ffed-39b6-47f4-871f-075b8416f307 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.087799] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601a74ee-b21f-454a-92c1-40a579d1e47b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.122584] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 
0df15328-aebd-44c5-9c78-ee05f188ad95] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 974.122584] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 974.122584] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleting the datastore file [datastore2] 0df15328-aebd-44c5-9c78-ee05f188ad95 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 974.144638] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-efb38bdb-b31f-46ee-9a8f-24c95923d52e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.153736] env[63538]: DEBUG nova.compute.manager [req-11400151-9b52-4347-a91f-2b965d295160 req-336888e7-a6de-4fb9-b5af-773811a1c8f5 service nova] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Detach interface failed, port_id=5487fc0a-b645-4a02-b47f-772c0eabd9d4, reason: Instance 376ee3d9-e8b5-4f47-9622-b873126b492e could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 974.164732] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 974.164732] env[63538]: value = "task-5101360" [ 974.164732] env[63538]: _type = "Task" [ 974.164732] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.178750] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101360, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.218127] env[63538]: INFO nova.compute.manager [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Took 36.99 seconds to build instance. [ 974.249483] env[63538]: INFO nova.compute.manager [-] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Took 2.09 seconds to deallocate network for instance. [ 974.398087] env[63538]: INFO nova.compute.manager [-] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Took 1.57 seconds to deallocate network for instance. [ 974.430587] env[63538]: DEBUG oslo_vmware.api [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101358, 'name': PowerOnVM_Task, 'duration_secs': 0.735665} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.433876] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 974.434171] env[63538]: DEBUG nova.compute.manager [None req-f2f372b7-d09e-405a-bafa-f5b4158d5c8f tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 974.435389] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15dc425-efc6-4797-ae67-888da966b776 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.599553] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4511761-924d-4c82-a5b4-64caed3d9bc9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.609603] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8069ce46-caf7-45be-9d09-0114a36d0287 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.659453] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b98644b-a99c-48de-82ed-c984519182c1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.669617] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622b1acc-c193-4525-8daa-026d04c7b271 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.683288] env[63538]: DEBUG oslo_vmware.api [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101360, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196287} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.691265] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.691546] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 974.691847] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 974.697548] env[63538]: DEBUG nova.compute.provider_tree [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 974.722120] env[63538]: INFO nova.scheduler.client.report [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleted allocations for instance 0df15328-aebd-44c5-9c78-ee05f188ad95 [ 974.727855] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b4701874-5008-461f-9344-3800bf3b9ef1 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "6bc30d96-8056-421c-875b-c24488e5f595" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.506s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.759616] env[63538]: DEBUG oslo_concurrency.lockutils [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.916539] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.496382] env[63538]: DEBUG 
oslo_concurrency.lockutils [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.518304] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.518823] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.518823] env[63538]: DEBUG nova.network.neutron [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 975.524482] env[63538]: ERROR nova.scheduler.client.report [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [req-fe9db57b-1e5d-4412-8c3f-059efaeca336] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f65218a4-1d3d-476a-9093-01cae92c8635. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fe9db57b-1e5d-4412-8c3f-059efaeca336"}]} [ 975.563189] env[63538]: DEBUG nova.scheduler.client.report [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 975.582270] env[63538]: DEBUG nova.scheduler.client.report [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 975.582270] env[63538]: DEBUG nova.compute.provider_tree [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 975.604043] env[63538]: DEBUG nova.scheduler.client.report [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 975.638540] env[63538]: DEBUG nova.scheduler.client.report [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 975.701149] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.701465] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.733128] env[63538]: DEBUG nova.network.neutron [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Updating instance_info_cache with network_info: [{"id": "f752fb93-15ab-4803-9e58-012b22d5f121", "address": "fa:16:3e:0f:19:27", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf752fb93-15", "ovs_interfaceid": "f752fb93-15ab-4803-9e58-012b22d5f121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.739383] env[63538]: DEBUG nova.compute.manager [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Received event network-vif-unplugged-8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 975.739769] env[63538]: DEBUG oslo_concurrency.lockutils [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] Acquiring lock "0df15328-aebd-44c5-9c78-ee05f188ad95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.739871] env[63538]: DEBUG oslo_concurrency.lockutils [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.740319] env[63538]: DEBUG oslo_concurrency.lockutils [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.740319] env[63538]: DEBUG nova.compute.manager [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] No waiting events found dispatching network-vif-unplugged-8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 975.740497] env[63538]: WARNING nova.compute.manager [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Received unexpected event network-vif-unplugged-8a332a90-393f-41ae-a924-4959c06e6207 for instance with vm_state shelved_offloaded and task_state None. [ 975.740768] env[63538]: DEBUG nova.compute.manager [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Received event network-changed-8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 975.741025] env[63538]: DEBUG nova.compute.manager [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Refreshing instance network info cache due to event network-changed-8a332a90-393f-41ae-a924-4959c06e6207. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 975.741154] env[63538]: DEBUG oslo_concurrency.lockutils [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] Acquiring lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.741428] env[63538]: DEBUG oslo_concurrency.lockutils [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] Acquired lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.743144] env[63538]: DEBUG nova.network.neutron [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Refreshing network info cache for port 8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 976.003333] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a72c0f-0c6a-4506-8280-82f8daac66f1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.020116] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae27164-1188-4e13-aa10-6e3fd94da2a4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.061545] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749045cb-c9a1-41db-b94e-94a40a2f95a8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.071773] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb31620-3208-4e65-bbdb-b882ff16a85f 
{{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.092713] env[63538]: DEBUG nova.compute.provider_tree [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 976.204782] env[63538]: DEBUG nova.compute.manager [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 976.235368] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Releasing lock "refresh_cache-ede967c0-ec3a-4f26-8290-0ee36890cd75" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.237495] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Updated the network info_cache for instance {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10088}} [ 976.237495] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.237495] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.237495] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.237495] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.237495] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.237495] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63538) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.239830] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63538) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10636}} [ 976.239830] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.370760] env[63538]: DEBUG nova.network.neutron [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance_info_cache with network_info: [{"id": "31cf3b33-b97d-4183-a21c-80e24e27351f", "address": "fa:16:3e:e0:a1:3e", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31cf3b33-b9", "ovs_interfaceid": "31cf3b33-b97d-4183-a21c-80e24e27351f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.642147] env[63538]: DEBUG nova.scheduler.client.report [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Updated inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 with generation 115 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 976.642147] env[63538]: DEBUG nova.compute.provider_tree [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Updating resource provider f65218a4-1d3d-476a-9093-01cae92c8635 generation from 115 to 116 during operation: update_inventory {{(pid=63538) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 976.642147] env[63538]: DEBUG nova.compute.provider_tree [None req-f3f17349-69cb-4cf5-b301-630cb5931bea 
tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 976.670029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "6bc30d96-8056-421c-875b-c24488e5f595" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.670029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "6bc30d96-8056-421c-875b-c24488e5f595" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.670029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "6bc30d96-8056-421c-875b-c24488e5f595-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.670029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "6bc30d96-8056-421c-875b-c24488e5f595-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.670029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "6bc30d96-8056-421c-875b-c24488e5f595-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.677034] env[63538]: INFO nova.compute.manager [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Terminating instance [ 976.679951] env[63538]: DEBUG nova.compute.manager [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 
6bc30d96-8056-421c-875b-c24488e5f595] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 976.680177] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 976.684915] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bd6604-dba1-4653-8a74-08467226ced6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.702024] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 976.702024] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b3b84a5-c196-4eae-a132-4d61b62a7354 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.710145] env[63538]: DEBUG oslo_vmware.api [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 976.710145] env[63538]: value = "task-5101361" [ 976.710145] env[63538]: _type = "Task" [ 976.710145] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.726531] env[63538]: DEBUG oslo_vmware.api [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101361, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.743316] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.744792] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.873964] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.154995] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.050s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.159022] env[63538]: DEBUG nova.compute.manager [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 977.161122] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.913s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.164703] env[63538]: INFO nova.compute.claims [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 977.177043] env[63538]: DEBUG nova.network.neutron [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Updated VIF entry in instance network info cache for port 8a332a90-393f-41ae-a924-4959c06e6207. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 977.177750] env[63538]: DEBUG nova.network.neutron [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Updating instance_info_cache with network_info: [{"id": "8a332a90-393f-41ae-a924-4959c06e6207", "address": "fa:16:3e:37:0e:9d", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap8a332a90-39", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.225485] env[63538]: DEBUG oslo_vmware.api [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101361, 'name': PowerOffVM_Task, 'duration_secs': 0.355814} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.225787] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 977.225968] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 977.226741] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ef648ab-c49d-4a54-9701-c630f0d3b8c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.328094] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 977.328246] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Deleting contents of the VM from 
datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 977.328407] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Deleting the datastore file [datastore2] 6bc30d96-8056-421c-875b-c24488e5f595 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 977.328693] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-265dff92-4b5c-4613-b19d-64196117f348 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.339973] env[63538]: DEBUG oslo_vmware.api [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for the task: (returnval){ [ 977.339973] env[63538]: value = "task-5101363" [ 977.339973] env[63538]: _type = "Task" [ 977.339973] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.353222] env[63538]: DEBUG oslo_vmware.api [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101363, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.411970] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d624cd-7622-4166-abb7-70e9b4fa2657 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.444867] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d7d871-d02c-4a99-a119-dc873cc7a4d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.453965] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance '4ec5d3a2-8b29-4074-b323-f94704043b8b' progress to 83 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 977.670598] env[63538]: DEBUG nova.compute.utils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 977.675536] env[63538]: DEBUG nova.compute.manager [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 977.675536] env[63538]: DEBUG nova.network.neutron [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 977.681137] env[63538]: DEBUG oslo_concurrency.lockutils [req-602af19f-e638-44e7-95d0-2c873b3c8788 req-69192181-f113-43c8-9f7b-febe54155712 service nova] Releasing lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.776462] env[63538]: DEBUG nova.compute.manager [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Received event network-changed-cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 977.776462] env[63538]: DEBUG nova.compute.manager [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Refreshing instance network info cache due to event network-changed-cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 977.776462] env[63538]: DEBUG oslo_concurrency.lockutils [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] Acquiring lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.776462] env[63538]: DEBUG oslo_concurrency.lockutils [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] Acquired lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.776462] env[63538]: DEBUG nova.network.neutron [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Refreshing network info cache for port cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 977.780647] env[63538]: DEBUG nova.policy [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ebe77c0e32ce4a32b290ba5088e107f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7063c42297c24f2baf7271fa25dec927', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 977.796873] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "3d80dc17-e330-4575-8e12-e06d8e76274a" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.796873] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.854330] env[63538]: DEBUG oslo_vmware.api [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Task: {'id': task-5101363, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160022} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.854632] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 977.854830] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 977.855024] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 977.855218] env[63538]: INFO nova.compute.manager [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Took 1.18 seconds to destroy the instance on the hypervisor. [ 977.856154] env[63538]: DEBUG oslo.service.loopingcall [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 977.856154] env[63538]: DEBUG nova.compute.manager [-] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 977.856154] env[63538]: DEBUG nova.network.neutron [-] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 977.961457] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 977.961845] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae35f0ce-e821-4788-97b8-9e7aa741aa58 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.971628] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 977.971628] env[63538]: value = "task-5101364" [ 977.971628] env[63538]: _type = "Task" [ 977.971628] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.982771] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101364, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.174845] env[63538]: DEBUG nova.compute.manager [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 978.299135] env[63538]: DEBUG nova.compute.manager [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 978.406599] env[63538]: DEBUG nova.network.neutron [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Successfully created port: 8240a40a-4486-4213-ac28-8eee15d652a8 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 978.483538] env[63538]: DEBUG oslo_vmware.api [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101364, 'name': PowerOnVM_Task, 'duration_secs': 0.424016} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.483538] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 978.483538] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5c8b13b5-918a-4611-be62-09d77dbe6997 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance '4ec5d3a2-8b29-4074-b323-f94704043b8b' progress to 100 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 978.583220] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4540852-ce15-4172-8536-568bdd648e9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.592211] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0fd45e3-c757-4916-8d30-db07875dd964 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.629951] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c40ceea-76dd-43ac-bdc6-758c740d395d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.633157] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "0df15328-aebd-44c5-9c78-ee05f188ad95" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.640397] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881b9a83-2236-4488-8f4a-b715fb1a32cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.657530] env[63538]: DEBUG nova.compute.provider_tree [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.826959] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.934374] env[63538]: DEBUG nova.network.neutron [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Updated VIF entry in instance network info cache for port cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 978.934374] env[63538]: DEBUG nova.network.neutron [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Updating instance_info_cache with network_info: [{"id": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "address": "fa:16:3e:2d:31:f0", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff0fa7b-e0", "ovs_interfaceid": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.162440] env[63538]: DEBUG nova.scheduler.client.report [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 979.166464] env[63538]: DEBUG nova.network.neutron [-] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.186371] env[63538]: DEBUG nova.compute.manager [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 979.238240] env[63538]: DEBUG nova.virt.hardware [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 979.238679] env[63538]: DEBUG nova.virt.hardware [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 979.238899] env[63538]: DEBUG nova.virt.hardware [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.239410] env[63538]: DEBUG nova.virt.hardware [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 979.239702] env[63538]: DEBUG nova.virt.hardware [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.244050] env[63538]: DEBUG nova.virt.hardware [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 979.244050] env[63538]: DEBUG nova.virt.hardware [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 979.244342] env[63538]: DEBUG nova.virt.hardware [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 979.244453] 
env[63538]: DEBUG nova.virt.hardware [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 979.244644] env[63538]: DEBUG nova.virt.hardware [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 979.244932] env[63538]: DEBUG nova.virt.hardware [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 979.245804] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2575c62-9633-4e3c-a003-4f45aecaedf3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.255438] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3330f1da-6a1e-4eb2-939e-53db37a01c8b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.438877] env[63538]: DEBUG oslo_concurrency.lockutils [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] Releasing lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.439201] env[63538]: DEBUG nova.compute.manager [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Received event network-changed-cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 979.439386] env[63538]: DEBUG nova.compute.manager [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Refreshing instance network info cache due to event network-changed-cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 979.440389] env[63538]: DEBUG oslo_concurrency.lockutils [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] Acquiring lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.440389] env[63538]: DEBUG oslo_concurrency.lockutils [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] Acquired lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.440389] env[63538]: DEBUG nova.network.neutron [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Refreshing network info cache for port cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 979.669998] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.670589] env[63538]: DEBUG nova.compute.manager [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 979.673511] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.012s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.675739] env[63538]: INFO nova.compute.claims [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.682674] env[63538]: INFO nova.compute.manager [-] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Took 1.83 seconds to deallocate network for instance. 
[ 979.833735] env[63538]: DEBUG nova.compute.manager [req-0e966d32-3321-4c2e-aeb3-2bd12f3bf115 req-d008b88a-9395-4422-ab2f-191312196f9f service nova] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Received event network-vif-deleted-67d24d11-311f-4e9b-90b8-20569417ffb1 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 980.188021] env[63538]: DEBUG nova.compute.utils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 980.197423] env[63538]: DEBUG nova.compute.manager [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 980.197496] env[63538]: DEBUG nova.network.neutron [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 980.204789] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.367125] env[63538]: DEBUG nova.policy [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87c19c9ce3594acd96c1c215ef8ea555', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '422f50dc66ec48b7b262643390072f3d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 980.403463] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "4ec5d3a2-8b29-4074-b323-f94704043b8b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.403739] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "4ec5d3a2-8b29-4074-b323-f94704043b8b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.403940] env[63538]: DEBUG nova.compute.manager [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 
tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Going to confirm migration 3 {{(pid=63538) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 980.483676] env[63538]: DEBUG nova.compute.manager [req-85222553-c951-414f-aa53-9abc5de8e2d7 req-dfac9115-4013-4875-abf1-40cfb2b15090 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Received event network-vif-plugged-8240a40a-4486-4213-ac28-8eee15d652a8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 980.487539] env[63538]: DEBUG oslo_concurrency.lockutils [req-85222553-c951-414f-aa53-9abc5de8e2d7 req-dfac9115-4013-4875-abf1-40cfb2b15090 service nova] Acquiring lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.489292] env[63538]: DEBUG oslo_concurrency.lockutils [req-85222553-c951-414f-aa53-9abc5de8e2d7 req-dfac9115-4013-4875-abf1-40cfb2b15090 service nova] Lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.489292] env[63538]: DEBUG oslo_concurrency.lockutils [req-85222553-c951-414f-aa53-9abc5de8e2d7 req-dfac9115-4013-4875-abf1-40cfb2b15090 service nova] Lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.489292] env[63538]: DEBUG nova.compute.manager [req-85222553-c951-414f-aa53-9abc5de8e2d7 req-dfac9115-4013-4875-abf1-40cfb2b15090 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] No waiting events found dispatching network-vif-plugged-8240a40a-4486-4213-ac28-8eee15d652a8 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 980.489292] env[63538]: WARNING nova.compute.manager [req-85222553-c951-414f-aa53-9abc5de8e2d7 req-dfac9115-4013-4875-abf1-40cfb2b15090 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Received unexpected event network-vif-plugged-8240a40a-4486-4213-ac28-8eee15d652a8 for instance with vm_state building and task_state spawning. [ 980.490036] env[63538]: DEBUG nova.network.neutron [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Successfully updated port: 8240a40a-4486-4213-ac28-8eee15d652a8 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 980.657151] env[63538]: DEBUG nova.network.neutron [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Updated VIF entry in instance network info cache for port cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 980.657151] env[63538]: DEBUG nova.network.neutron [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Updating instance_info_cache with network_info: [{"id": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "address": "fa:16:3e:2d:31:f0", "network": {"id": "549f45d6-3d4f-4476-92ba-4bd87fefba51", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-822672880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55edcd65da7b4a569a4c27aab4819cde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff0fa7b-e0", "ovs_interfaceid": "cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.707620] env[63538]: DEBUG nova.compute.manager [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 980.980400] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.980698] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.980784] env[63538]: DEBUG nova.network.neutron [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 980.980983] env[63538]: DEBUG nova.objects.instance [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lazy-loading 'info_cache' on Instance uuid 4ec5d3a2-8b29-4074-b323-f94704043b8b {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 980.995373] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.995539] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.995738] env[63538]: DEBUG nova.network.neutron [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 981.132731] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3690e210-09e9-4a24-99e8-bd56ae0eeadc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.144166] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1653afe-63ff-4d02-824f-53c41191b6ab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.150228] env[63538]: DEBUG nova.network.neutron [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Successfully created port: 
dc949e07-b50e-4c8e-8b94-e1c90b211bb7 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 981.181661] env[63538]: DEBUG oslo_concurrency.lockutils [req-1593ad2a-95d0-43d5-a098-4a3e2c02ccc3 req-42197f7f-7ef5-418f-8211-084b4c0279a3 service nova] Releasing lock "refresh_cache-edcc5700-7b1e-494a-82d1-844373a9d5a6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.183018] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464d189a-67f2-4f89-be46-a0ca1b52b17d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.192135] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f16ace5-0bb7-4442-8fe8-23392ea49863 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.207120] env[63538]: DEBUG nova.compute.provider_tree [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.584135] env[63538]: DEBUG nova.network.neutron [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 981.604287] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Acquiring lock "4f81dc4e-2092-4a2c-a511-589d47d118b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.604543] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Lock "4f81dc4e-2092-4a2c-a511-589d47d118b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.710676] env[63538]: DEBUG nova.scheduler.client.report [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 981.720288] env[63538]: DEBUG nova.compute.manager [None req-bc42c906-411d-4da4-83c6-99a55f56334f 
tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 981.782036] env[63538]: DEBUG nova.virt.hardware [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='4a39c40fba5ca0a18456c6ceb4f05ea0',container_format='bare',created_at=2025-12-12T12:56:50Z,direct_url=,disk_format='vmdk',id=b299d3c5-b48d-4f1f-a911-692e6a986d0c,min_disk=1,min_ram=0,name='tempest-test-snap-93697781',owner='422f50dc66ec48b7b262643390072f3d',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-12-12T12:57:05Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 981.782036] env[63538]: DEBUG nova.virt.hardware [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 981.782036] env[63538]: DEBUG nova.virt.hardware [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 981.782351] env[63538]: DEBUG nova.virt.hardware [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 981.782567] env[63538]: DEBUG nova.virt.hardware [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 981.782798] env[63538]: DEBUG nova.virt.hardware [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 981.783051] env[63538]: DEBUG nova.virt.hardware [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 981.783231] env[63538]: DEBUG nova.virt.hardware [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 981.783408] 
env[63538]: DEBUG nova.virt.hardware [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 981.783692] env[63538]: DEBUG nova.virt.hardware [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 981.783753] env[63538]: DEBUG nova.virt.hardware [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 981.784653] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4898878-f028-4878-81ed-7e8c8ad512d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.795190] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa68e505-d422-4bd1-8e36-1ad3707bf01c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.110866] env[63538]: DEBUG nova.compute.manager [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 982.200686] env[63538]: DEBUG nova.network.neutron [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updating instance_info_cache with network_info: [{"id": "8240a40a-4486-4213-ac28-8eee15d652a8", "address": "fa:16:3e:41:82:69", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8240a40a-44", "ovs_interfaceid": "8240a40a-4486-4213-ac28-8eee15d652a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.226750] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 
tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.226888] env[63538]: DEBUG nova.compute.manager [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 982.229537] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.908s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.229795] env[63538]: DEBUG nova.objects.instance [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lazy-loading 'resources' on Instance uuid 2e97b357-0200-4aed-9705-dd7808f853ba {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.340060] env[63538]: DEBUG nova.network.neutron [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance_info_cache with network_info: [{"id": "31cf3b33-b97d-4183-a21c-80e24e27351f", "address": "fa:16:3e:e0:a1:3e", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31cf3b33-b9", "ovs_interfaceid": "31cf3b33-b97d-4183-a21c-80e24e27351f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.632021] env[63538]: DEBUG nova.compute.manager [req-61021796-4b95-4168-a9bf-80fc99c8aec4 req-b8a1a5d7-0574-4854-8c71-85e02c46edbc service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Received event network-changed-8240a40a-4486-4213-ac28-8eee15d652a8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 982.632021] env[63538]: DEBUG nova.compute.manager [req-61021796-4b95-4168-a9bf-80fc99c8aec4 req-b8a1a5d7-0574-4854-8c71-85e02c46edbc 
service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Refreshing instance network info cache due to event network-changed-8240a40a-4486-4213-ac28-8eee15d652a8. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 982.632021] env[63538]: DEBUG oslo_concurrency.lockutils [req-61021796-4b95-4168-a9bf-80fc99c8aec4 req-b8a1a5d7-0574-4854-8c71-85e02c46edbc service nova] Acquiring lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.652215] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.704389] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.704771] env[63538]: DEBUG nova.compute.manager [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Instance network_info: |[{"id": "8240a40a-4486-4213-ac28-8eee15d652a8", "address": "fa:16:3e:41:82:69", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8240a40a-44", "ovs_interfaceid": "8240a40a-4486-4213-ac28-8eee15d652a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 982.705132] env[63538]: DEBUG oslo_concurrency.lockutils [req-61021796-4b95-4168-a9bf-80fc99c8aec4 req-b8a1a5d7-0574-4854-8c71-85e02c46edbc service nova] Acquired lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.705339] env[63538]: DEBUG nova.network.neutron [req-61021796-4b95-4168-a9bf-80fc99c8aec4 req-b8a1a5d7-0574-4854-8c71-85e02c46edbc service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Refreshing network info cache for port 8240a40a-4486-4213-ac28-8eee15d652a8 {{(pid=63538) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 982.707239] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:82:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '412cde91-d0f0-4193-b36b-d8b9d17384c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8240a40a-4486-4213-ac28-8eee15d652a8', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 982.717304] env[63538]: DEBUG oslo.service.loopingcall [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 982.717304] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 982.718020] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-766cb87a-3fbd-4d95-bf39-56d1ef9c6925 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.738799] env[63538]: DEBUG nova.compute.utils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 982.740376] env[63538]: DEBUG nova.objects.instance [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lazy-loading 'numa_topology' on Instance uuid 2e97b357-0200-4aed-9705-dd7808f853ba {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.743985] env[63538]: DEBUG nova.compute.manager [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 982.743985] env[63538]: DEBUG nova.network.neutron [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 982.751850] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 982.751850] env[63538]: value = "task-5101365" [ 982.751850] env[63538]: _type = "Task" [ 982.751850] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.763244] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101365, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.790347] env[63538]: DEBUG nova.policy [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f315670d336b49d6a732297656ce515a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df090f9a727d4cf4a0f466e27928bdc6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 982.845484] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "refresh_cache-4ec5d3a2-8b29-4074-b323-f94704043b8b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.845919] env[63538]: DEBUG nova.objects.instance [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lazy-loading 'migration_context' on Instance uuid 4ec5d3a2-8b29-4074-b323-f94704043b8b {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.155912] env[63538]: DEBUG nova.network.neutron [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Successfully created port: 5250918c-5112-49ad-b1d3-f73c2d534637 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 983.242724] env[63538]: DEBUG nova.compute.manager [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 983.246272] env[63538]: DEBUG nova.objects.base [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Object Instance<2e97b357-0200-4aed-9705-dd7808f853ba> lazy-loaded attributes: resources,numa_topology {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 983.264348] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101365, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.359707] env[63538]: DEBUG nova.objects.base [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Object Instance<4ec5d3a2-8b29-4074-b323-f94704043b8b> lazy-loaded attributes: info_cache,migration_context {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 983.361314] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee63b04a-937c-40cd-9972-7334ee75f55f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.391612] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ce04772-65ca-46eb-814c-764feccca9a2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.400286] env[63538]: DEBUG oslo_vmware.api [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 983.400286] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520cc2cd-d088-719e-e960-6e7efad87e50" [ 983.400286] env[63538]: _type = "Task" [ 983.400286] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.409451] env[63538]: DEBUG oslo_vmware.api [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520cc2cd-d088-719e-e960-6e7efad87e50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.546602] env[63538]: DEBUG nova.compute.manager [req-174930aa-7b1e-4fc9-aa69-43d6100e115b req-571a209a-8a51-4740-839c-e3ea46499fa8 service nova] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Received event network-vif-plugged-dc949e07-b50e-4c8e-8b94-e1c90b211bb7 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 983.547739] env[63538]: DEBUG oslo_concurrency.lockutils [req-174930aa-7b1e-4fc9-aa69-43d6100e115b req-571a209a-8a51-4740-839c-e3ea46499fa8 service nova] Acquiring lock "0339c969-ad97-47b1-8fab-ee595738d9df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.547739] env[63538]: DEBUG oslo_concurrency.lockutils [req-174930aa-7b1e-4fc9-aa69-43d6100e115b req-571a209a-8a51-4740-839c-e3ea46499fa8 service nova] Lock "0339c969-ad97-47b1-8fab-ee595738d9df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.547739] env[63538]: DEBUG oslo_concurrency.lockutils [req-174930aa-7b1e-4fc9-aa69-43d6100e115b req-571a209a-8a51-4740-839c-e3ea46499fa8 service nova] Lock "0339c969-ad97-47b1-8fab-ee595738d9df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.547739] env[63538]: DEBUG nova.compute.manager [req-174930aa-7b1e-4fc9-aa69-43d6100e115b req-571a209a-8a51-4740-839c-e3ea46499fa8 service nova] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] No waiting events found dispatching network-vif-plugged-dc949e07-b50e-4c8e-8b94-e1c90b211bb7 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 983.547946] env[63538]: WARNING nova.compute.manager [req-174930aa-7b1e-4fc9-aa69-43d6100e115b req-571a209a-8a51-4740-839c-e3ea46499fa8 service nova] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Received unexpected event network-vif-plugged-dc949e07-b50e-4c8e-8b94-e1c90b211bb7 for instance with vm_state building and task_state spawning. [ 983.664026] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb93dfd-4d0e-4d45-90af-e33fc6556595 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.673467] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6c1728-7a21-402e-bc3c-faa2b6a845fc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.680515] env[63538]: DEBUG nova.network.neutron [req-61021796-4b95-4168-a9bf-80fc99c8aec4 req-b8a1a5d7-0574-4854-8c71-85e02c46edbc service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updated VIF entry in instance network info cache for port 8240a40a-4486-4213-ac28-8eee15d652a8. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 983.680911] env[63538]: DEBUG nova.network.neutron [req-61021796-4b95-4168-a9bf-80fc99c8aec4 req-b8a1a5d7-0574-4854-8c71-85e02c46edbc service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updating instance_info_cache with network_info: [{"id": "8240a40a-4486-4213-ac28-8eee15d652a8", "address": "fa:16:3e:41:82:69", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8240a40a-44", "ovs_interfaceid": "8240a40a-4486-4213-ac28-8eee15d652a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.716799] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bb970f-1d7b-45e3-bd53-5ba32f3d6d32 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.727205] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9692b48d-4bf6-45e5-a4a7-12a41c944406 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.745211] env[63538]: DEBUG nova.compute.provider_tree [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.757933] env[63538]: DEBUG nova.network.neutron [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Successfully updated port: dc949e07-b50e-4c8e-8b94-e1c90b211bb7 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 983.774915] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101365, 'name': CreateVM_Task, 'duration_secs': 0.553313} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.776329] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 983.777114] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.777297] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.777673] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 983.777943] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cecfe9b-dfca-4b3a-9933-063c53c67e32 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.784066] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 983.784066] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520235fa-608f-66f9-b810-716d04c2d187" [ 983.784066] env[63538]: _type = "Task" [ 983.784066] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.799067] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520235fa-608f-66f9-b810-716d04c2d187, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.913648] env[63538]: DEBUG oslo_vmware.api [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520cc2cd-d088-719e-e960-6e7efad87e50, 'name': SearchDatastore_Task, 'duration_secs': 0.012065} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.914100] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.184289] env[63538]: DEBUG oslo_concurrency.lockutils [req-61021796-4b95-4168-a9bf-80fc99c8aec4 req-b8a1a5d7-0574-4854-8c71-85e02c46edbc service nova] Releasing lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.252812] env[63538]: DEBUG nova.scheduler.client.report [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 984.260792] env[63538]: DEBUG nova.compute.manager [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 984.263742] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "refresh_cache-0339c969-ad97-47b1-8fab-ee595738d9df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.263742] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "refresh_cache-0339c969-ad97-47b1-8fab-ee595738d9df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.263742] env[63538]: DEBUG nova.network.neutron [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 984.293598] env[63538]: DEBUG nova.virt.hardware [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 984.293864] env[63538]: DEBUG nova.virt.hardware [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 984.294045] env[63538]: DEBUG nova.virt.hardware [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 984.294243] env[63538]: DEBUG nova.virt.hardware [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 984.294395] env[63538]: DEBUG nova.virt.hardware [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 984.294547] env[63538]: DEBUG nova.virt.hardware [None 
req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 984.294756] env[63538]: DEBUG nova.virt.hardware [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 984.294919] env[63538]: DEBUG nova.virt.hardware [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 984.295181] env[63538]: DEBUG nova.virt.hardware [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 984.295423] env[63538]: DEBUG nova.virt.hardware [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 984.295594] env[63538]: DEBUG nova.virt.hardware [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 984.296631] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694a2f08-ef00-4d7c-a41b-656bee91c2ed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.304491] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520235fa-608f-66f9-b810-716d04c2d187, 'name': SearchDatastore_Task, 'duration_secs': 0.020758} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.305541] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.305886] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 984.306008] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.306179] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.306381] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 984.307482] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-123bc1ea-d533-4eca-a16c-f781ff681a0d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.313594] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6d9b32-0c97-4a77-8bb2-e70ca03cdf3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.320824] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 984.320824] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 984.329391] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b47775f5-d463-4d99-8807-2c02d54edcca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.336299] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 984.336299] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b4729e-aab7-75cf-87ae-67f6f4993bcd" [ 984.336299] env[63538]: _type = "Task" [ 984.336299] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.348140] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b4729e-aab7-75cf-87ae-67f6f4993bcd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.685646] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Acquiring lock "144df97e-f47b-4ead-8243-345d98b9f3e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.685905] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Lock "144df97e-f47b-4ead-8243-345d98b9f3e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.734915] env[63538]: DEBUG nova.compute.manager [req-a4603725-cdc7-49c3-b06f-cc4da568c3c6 req-5905b3ea-4908-453a-8e94-300f1598d3d0 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Received event network-vif-plugged-5250918c-5112-49ad-b1d3-f73c2d534637 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 984.734915] env[63538]: DEBUG oslo_concurrency.lockutils [req-a4603725-cdc7-49c3-b06f-cc4da568c3c6 req-5905b3ea-4908-453a-8e94-300f1598d3d0 service nova] Acquiring lock "fb26fb32-a420-4667-850c-e32786edd8f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.734915] env[63538]: DEBUG oslo_concurrency.lockutils [req-a4603725-cdc7-49c3-b06f-cc4da568c3c6 req-5905b3ea-4908-453a-8e94-300f1598d3d0 service nova] Lock "fb26fb32-a420-4667-850c-e32786edd8f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.734915] env[63538]: DEBUG oslo_concurrency.lockutils 
[req-a4603725-cdc7-49c3-b06f-cc4da568c3c6 req-5905b3ea-4908-453a-8e94-300f1598d3d0 service nova] Lock "fb26fb32-a420-4667-850c-e32786edd8f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.734915] env[63538]: DEBUG nova.compute.manager [req-a4603725-cdc7-49c3-b06f-cc4da568c3c6 req-5905b3ea-4908-453a-8e94-300f1598d3d0 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] No waiting events found dispatching network-vif-plugged-5250918c-5112-49ad-b1d3-f73c2d534637 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 984.735437] env[63538]: WARNING nova.compute.manager [req-a4603725-cdc7-49c3-b06f-cc4da568c3c6 req-5905b3ea-4908-453a-8e94-300f1598d3d0 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Received unexpected event network-vif-plugged-5250918c-5112-49ad-b1d3-f73c2d534637 for instance with vm_state building and task_state spawning. [ 984.758067] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.528s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.761126] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.028s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.761231] env[63538]: DEBUG nova.objects.instance [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lazy-loading 'resources' on Instance uuid 8097cb1c-bbba-45a8-be81-64d38decb1df {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.821669] env[63538]: DEBUG nova.network.neutron [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 984.835585] env[63538]: DEBUG nova.network.neutron [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Successfully updated port: 5250918c-5112-49ad-b1d3-f73c2d534637 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 984.846813] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b4729e-aab7-75cf-87ae-67f6f4993bcd, 'name': SearchDatastore_Task, 'duration_secs': 0.010529} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.850336] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9db2a0c-7900-49fc-ac05-56c7e38cfc6b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.857535] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 984.857535] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5276f67b-4fd6-86f6-2852-d6d24bfb0733" [ 984.857535] env[63538]: _type = "Task" [ 984.857535] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.866291] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5276f67b-4fd6-86f6-2852-d6d24bfb0733, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.002501] env[63538]: DEBUG nova.network.neutron [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Updating instance_info_cache with network_info: [{"id": "dc949e07-b50e-4c8e-8b94-e1c90b211bb7", "address": "fa:16:3e:b2:6d:f8", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc949e07-b5", "ovs_interfaceid": "dc949e07-b50e-4c8e-8b94-e1c90b211bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.188321] env[63538]: DEBUG nova.compute.manager [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 985.272946] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dcf33280-7b12-42a1-84be-beafc6e19643 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "2e97b357-0200-4aed-9705-dd7808f853ba" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 41.380s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.273815] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fbc810de-59fd-42b2-92f5-ca6dfc6c1b51 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "2e97b357-0200-4aed-9705-dd7808f853ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 19.492s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.275220] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fbc810de-59fd-42b2-92f5-ca6dfc6c1b51 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "2e97b357-0200-4aed-9705-dd7808f853ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.278874] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fbc810de-59fd-42b2-92f5-ca6dfc6c1b51 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "2e97b357-0200-4aed-9705-dd7808f853ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.278874] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fbc810de-59fd-42b2-92f5-ca6dfc6c1b51 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "2e97b357-0200-4aed-9705-dd7808f853ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.280836] env[63538]: INFO nova.compute.manager [None req-fbc810de-59fd-42b2-92f5-ca6dfc6c1b51 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Terminating instance [ 985.283032] env[63538]: DEBUG nova.compute.manager [None req-fbc810de-59fd-42b2-92f5-ca6dfc6c1b51 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 985.283586] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fbc810de-59fd-42b2-92f5-ca6dfc6c1b51 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 985.283586] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a3c7762f-9fa0-41ee-89fe-38573fba87bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.300747] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b3ef58-db91-493e-a3d9-fd033a10733c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.350242] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.350242] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.350384] env[63538]: DEBUG nova.network.neutron [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 985.354390] env[63538]: WARNING nova.virt.vmwareapi.vmops [None req-fbc810de-59fd-42b2-92f5-ca6dfc6c1b51 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2e97b357-0200-4aed-9705-dd7808f853ba could not be found. [ 985.354390] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fbc810de-59fd-42b2-92f5-ca6dfc6c1b51 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 985.354390] env[63538]: INFO nova.compute.manager [None req-fbc810de-59fd-42b2-92f5-ca6dfc6c1b51 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Took 0.07 seconds to destroy the instance on the hypervisor. [ 985.354390] env[63538]: DEBUG oslo.service.loopingcall [None req-fbc810de-59fd-42b2-92f5-ca6dfc6c1b51 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 985.354390] env[63538]: DEBUG nova.compute.manager [-] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 985.354390] env[63538]: DEBUG nova.network.neutron [-] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 985.373204] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5276f67b-4fd6-86f6-2852-d6d24bfb0733, 'name': SearchDatastore_Task, 'duration_secs': 0.010063} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.373498] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.373771] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 209c5f46-9c63-4f55-bc75-bc2e4da989ac/209c5f46-9c63-4f55-bc75-bc2e4da989ac.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 985.374063] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c055bda8-2fcf-402c-ab06-796f066ac1ee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.382334] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 985.382334] env[63538]: value = "task-5101366" [ 985.382334] env[63538]: _type = "Task" [ 985.382334] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.393127] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101366, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.506797] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "refresh_cache-0339c969-ad97-47b1-8fab-ee595738d9df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.506797] env[63538]: DEBUG nova.compute.manager [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Instance network_info: |[{"id": "dc949e07-b50e-4c8e-8b94-e1c90b211bb7", "address": "fa:16:3e:b2:6d:f8", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc949e07-b5", "ovs_interfaceid": "dc949e07-b50e-4c8e-8b94-e1c90b211bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 985.507217] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:6d:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f972c061-0cd5-4aed-8cfb-42cc4a08835a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc949e07-b50e-4c8e-8b94-e1c90b211bb7', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 985.516314] env[63538]: DEBUG oslo.service.loopingcall [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 985.519584] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 985.520365] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1efe6ff-1aa1-404a-a43a-ee099c1922fc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.557243] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 985.557243] env[63538]: value = "task-5101367" [ 985.557243] env[63538]: _type = "Task" [ 985.557243] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.577291] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101367, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.610976] env[63538]: DEBUG nova.compute.manager [req-77ad48d1-8e00-4b40-979d-44450caec9b1 req-d57a2ea1-332a-40d6-ac16-96d9e72e8334 service nova] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Received event network-changed-dc949e07-b50e-4c8e-8b94-e1c90b211bb7 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 985.611222] env[63538]: DEBUG nova.compute.manager [req-77ad48d1-8e00-4b40-979d-44450caec9b1 req-d57a2ea1-332a-40d6-ac16-96d9e72e8334 service nova] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Refreshing instance network info cache due to event network-changed-dc949e07-b50e-4c8e-8b94-e1c90b211bb7. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 985.611461] env[63538]: DEBUG oslo_concurrency.lockutils [req-77ad48d1-8e00-4b40-979d-44450caec9b1 req-d57a2ea1-332a-40d6-ac16-96d9e72e8334 service nova] Acquiring lock "refresh_cache-0339c969-ad97-47b1-8fab-ee595738d9df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.611607] env[63538]: DEBUG oslo_concurrency.lockutils [req-77ad48d1-8e00-4b40-979d-44450caec9b1 req-d57a2ea1-332a-40d6-ac16-96d9e72e8334 service nova] Acquired lock "refresh_cache-0339c969-ad97-47b1-8fab-ee595738d9df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.611797] env[63538]: DEBUG nova.network.neutron [req-77ad48d1-8e00-4b40-979d-44450caec9b1 req-d57a2ea1-332a-40d6-ac16-96d9e72e8334 service nova] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Refreshing network info cache for port dc949e07-b50e-4c8e-8b94-e1c90b211bb7 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 985.718234] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ffd226-fa40-48be-8072-6c6bff4c7c16 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.726144] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.728296] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff64990c-8a84-4a5c-a446-5d134676a244 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.765914] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cac0dab-55b0-44ae-b50b-3cb77f7e0d84 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.774661] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f742dd-0a31-4e20-9ec3-4cf74ae9bb5f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.791765] env[63538]: DEBUG nova.compute.provider_tree [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.888314] env[63538]: DEBUG nova.network.neutron [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 985.896724] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101366, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.063842] env[63538]: DEBUG nova.network.neutron [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Updating instance_info_cache with network_info: [{"id": "5250918c-5112-49ad-b1d3-f73c2d534637", "address": "fa:16:3e:6d:44:6f", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5250918c-51", "ovs_interfaceid": "5250918c-5112-49ad-b1d3-f73c2d534637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.073029] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101367, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.192154] env[63538]: DEBUG nova.network.neutron [-] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.295281] env[63538]: DEBUG nova.scheduler.client.report [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 986.387518] env[63538]: DEBUG nova.network.neutron [req-77ad48d1-8e00-4b40-979d-44450caec9b1 req-d57a2ea1-332a-40d6-ac16-96d9e72e8334 service nova] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Updated VIF entry in instance network info cache for port dc949e07-b50e-4c8e-8b94-e1c90b211bb7. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 986.387950] env[63538]: DEBUG nova.network.neutron [req-77ad48d1-8e00-4b40-979d-44450caec9b1 req-d57a2ea1-332a-40d6-ac16-96d9e72e8334 service nova] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Updating instance_info_cache with network_info: [{"id": "dc949e07-b50e-4c8e-8b94-e1c90b211bb7", "address": "fa:16:3e:b2:6d:f8", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc949e07-b5", "ovs_interfaceid": "dc949e07-b50e-4c8e-8b94-e1c90b211bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.397351] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101366, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552624} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.398194] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 209c5f46-9c63-4f55-bc75-bc2e4da989ac/209c5f46-9c63-4f55-bc75-bc2e4da989ac.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 986.398473] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 986.398765] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41757b81-8a21-48de-b8a3-318890de6a2d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.406700] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 986.406700] env[63538]: value = "task-5101368" [ 986.406700] env[63538]: _type = "Task" [ 986.406700] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.415943] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101368, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.568434] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.568760] env[63538]: DEBUG nova.compute.manager [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Instance network_info: |[{"id": "5250918c-5112-49ad-b1d3-f73c2d534637", "address": "fa:16:3e:6d:44:6f", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5250918c-51", "ovs_interfaceid": "5250918c-5112-49ad-b1d3-f73c2d534637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 986.569039] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101367, 'name': CreateVM_Task, 'duration_secs': 0.609926} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.569404] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:44:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5250918c-5112-49ad-b1d3-f73c2d534637', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.576844] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Creating folder: Project (df090f9a727d4cf4a0f466e27928bdc6). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 986.577042] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 986.577911] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26930d7f-775c-4334-987d-9df4cc6b7ec6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.579614] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b299d3c5-b48d-4f1f-a911-692e6a986d0c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.579787] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b299d3c5-b48d-4f1f-a911-692e6a986d0c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.580191] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b299d3c5-b48d-4f1f-a911-692e6a986d0c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 986.580475] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdc03312-6a35-4072-8587-499fcddc70cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.586613] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 986.586613] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e31aa4-803a-250f-08d4-e0f29543c028" [ 986.586613] env[63538]: _type = "Task" [ 986.586613] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.592920] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Created folder: Project (df090f9a727d4cf4a0f466e27928bdc6) in parent group-v992234. [ 986.592920] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Creating folder: Instances. Parent ref: group-v992461. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 986.596055] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-987be1f3-f715-4269-8ff7-6b4c94d2eba3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.597718] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e31aa4-803a-250f-08d4-e0f29543c028, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.606574] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Created folder: Instances in parent group-v992461. [ 986.606846] env[63538]: DEBUG oslo.service.loopingcall [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 986.607066] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 986.607298] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05bada18-54dc-444e-8001-31c644a0db10 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.629474] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.629474] env[63538]: value = "task-5101371" [ 986.629474] env[63538]: _type = "Task" [ 986.629474] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.638188] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101371, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.694056] env[63538]: INFO nova.compute.manager [-] [instance: 2e97b357-0200-4aed-9705-dd7808f853ba] Took 1.34 seconds to deallocate network for instance. [ 986.784266] env[63538]: DEBUG nova.compute.manager [req-bdb8beb2-11a9-47fa-972c-18f6ebdbef5e req-61bafc6f-6a52-459e-900c-f02e88f52854 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Received event network-changed-5250918c-5112-49ad-b1d3-f73c2d534637 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 986.784507] env[63538]: DEBUG nova.compute.manager [req-bdb8beb2-11a9-47fa-972c-18f6ebdbef5e req-61bafc6f-6a52-459e-900c-f02e88f52854 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Refreshing instance network info cache due to event network-changed-5250918c-5112-49ad-b1d3-f73c2d534637. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 986.784730] env[63538]: DEBUG oslo_concurrency.lockutils [req-bdb8beb2-11a9-47fa-972c-18f6ebdbef5e req-61bafc6f-6a52-459e-900c-f02e88f52854 service nova] Acquiring lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.784876] env[63538]: DEBUG oslo_concurrency.lockutils [req-bdb8beb2-11a9-47fa-972c-18f6ebdbef5e req-61bafc6f-6a52-459e-900c-f02e88f52854 service nova] Acquired lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.785370] env[63538]: DEBUG nova.network.neutron [req-bdb8beb2-11a9-47fa-972c-18f6ebdbef5e req-61bafc6f-6a52-459e-900c-f02e88f52854 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Refreshing network info cache for port 5250918c-5112-49ad-b1d3-f73c2d534637 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 986.802198] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.041s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.805090] env[63538]: DEBUG oslo_concurrency.lockutils [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.046s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.805331] env[63538]: DEBUG nova.objects.instance [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lazy-loading 'resources' on Instance uuid cf72ac3d-4051-428a-b5bc-7f28accb13c0 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 986.830560] env[63538]: INFO nova.scheduler.client.report [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Deleted allocations for instance 8097cb1c-bbba-45a8-be81-64d38decb1df [ 986.892042] env[63538]: DEBUG oslo_concurrency.lockutils [req-77ad48d1-8e00-4b40-979d-44450caec9b1 req-d57a2ea1-332a-40d6-ac16-96d9e72e8334 service nova] Releasing lock "refresh_cache-0339c969-ad97-47b1-8fab-ee595738d9df" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.918358] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101368, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068517} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.919063] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 986.920032] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f4af7a-b96a-4045-ab6c-ecaa997ce6f5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.945797] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 209c5f46-9c63-4f55-bc75-bc2e4da989ac/209c5f46-9c63-4f55-bc75-bc2e4da989ac.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 986.946581] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1e43f00-775c-44cf-a08d-ffcfddb21ee0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.969269] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 986.969269] env[63538]: value = "task-5101372" [ 986.969269] env[63538]: _type = "Task" [ 986.969269] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.978956] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101372, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.100907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b299d3c5-b48d-4f1f-a911-692e6a986d0c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.101210] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Processing image b299d3c5-b48d-4f1f-a911-692e6a986d0c {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.101460] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b299d3c5-b48d-4f1f-a911-692e6a986d0c/b299d3c5-b48d-4f1f-a911-692e6a986d0c.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.101616] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b299d3c5-b48d-4f1f-a911-692e6a986d0c/b299d3c5-b48d-4f1f-a911-692e6a986d0c.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.101799] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.102066] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8916ef33-32d8-42d4-9159-d7d4300d359b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.111355] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.111545] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 987.112306] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e98c2a6-de67-4c93-8ccd-37dc609905fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.118529] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 987.118529] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f66fba-1205-4ee5-5c14-b4420a4438e6" [ 987.118529] env[63538]: _type = "Task" [ 987.118529] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.127208] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f66fba-1205-4ee5-5c14-b4420a4438e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.139502] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101371, 'name': CreateVM_Task, 'duration_secs': 0.503978} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.139693] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 987.140356] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.140534] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.140875] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 987.141151] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38d0e503-17d2-401f-a532-a2c39dda4190 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.146175] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 987.146175] env[63538]: value = 
"session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52faa593-ca88-2829-e50f-7574adb1e9ed" [ 987.146175] env[63538]: _type = "Task" [ 987.146175] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.155331] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52faa593-ca88-2829-e50f-7574adb1e9ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.340461] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ee076f8d-53c9-4db1-9ed3-010cdc5776b6 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "8097cb1c-bbba-45a8-be81-64d38decb1df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.574s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.484374] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101372, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.583802] env[63538]: INFO nova.compute.manager [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Rebuilding instance [ 987.612035] env[63538]: DEBUG nova.network.neutron [req-bdb8beb2-11a9-47fa-972c-18f6ebdbef5e req-61bafc6f-6a52-459e-900c-f02e88f52854 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Updated VIF entry in instance network info cache for port 5250918c-5112-49ad-b1d3-f73c2d534637. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 987.612429] env[63538]: DEBUG nova.network.neutron [req-bdb8beb2-11a9-47fa-972c-18f6ebdbef5e req-61bafc6f-6a52-459e-900c-f02e88f52854 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Updating instance_info_cache with network_info: [{"id": "5250918c-5112-49ad-b1d3-f73c2d534637", "address": "fa:16:3e:6d:44:6f", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5250918c-51", "ovs_interfaceid": "5250918c-5112-49ad-b1d3-f73c2d534637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.632593] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Preparing fetch location {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 987.632858] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Fetch image to [datastore2] OSTACK_IMG_aa074a5e-caf6-4dbd-a8d5-a33508687552/OSTACK_IMG_aa074a5e-caf6-4dbd-a8d5-a33508687552.vmdk {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 987.633111] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Downloading stream optimized image b299d3c5-b48d-4f1f-a911-692e6a986d0c to [datastore2] OSTACK_IMG_aa074a5e-caf6-4dbd-a8d5-a33508687552/OSTACK_IMG_aa074a5e-caf6-4dbd-a8d5-a33508687552.vmdk on the data store datastore2 as vApp {{(pid=63538) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 987.633341] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Downloading image file data b299d3c5-b48d-4f1f-a911-692e6a986d0c to the ESX as VM named 'OSTACK_IMG_aa074a5e-caf6-4dbd-a8d5-a33508687552' {{(pid=63538) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 987.635896] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2208563d-e726-4a70-89a4-3aefa82a175f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.641783] env[63538]: DEBUG nova.compute.manager [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 987.642659] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0398ca-57ca-4620-85e8-365a73be5b37 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.651541] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c42d362-5cf3-4db0-b957-9761ee1a3bec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.666665] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52faa593-ca88-2829-e50f-7574adb1e9ed, 'name': SearchDatastore_Task, 'duration_secs': 0.010368} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.693910] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.694325] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.694651] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.694816] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.695015] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.717960] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e005867f-d221-4e6e-aae1-8a9081e8970f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.721135] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d262e4a7-ab79-4720-9c68-1ab858e8d667 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.727881] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fbc810de-59fd-42b2-92f5-ca6dfc6c1b51 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "2e97b357-0200-4aed-9705-dd7808f853ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.454s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.736021] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd855ac-404a-4a60-959f-4833449439f4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.739195] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.739453] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 987.740681] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9511d06e-3053-4728-84e0-55ac7cf3fa68 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.752390] env[63538]: DEBUG nova.compute.provider_tree [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.760216] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 987.760216] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521bc8d8-ef4e-70fb-4b80-a5de3ef53720" [ 987.760216] env[63538]: _type = "Task" [ 987.760216] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.771090] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521bc8d8-ef4e-70fb-4b80-a5de3ef53720, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.781284] env[63538]: DEBUG oslo_vmware.rw_handles [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 987.781284] env[63538]: value = "resgroup-9" [ 987.781284] env[63538]: _type = "ResourcePool" [ 987.781284] env[63538]: }. {{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 987.781579] env[63538]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-281f3f7f-e5c1-4087-88c1-9b49b1dad041 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.804642] env[63538]: DEBUG oslo_vmware.rw_handles [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lease: (returnval){ [ 987.804642] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521214d3-b5fa-75b9-acdd-d0c5789213cf" [ 987.804642] env[63538]: _type = "HttpNfcLease" [ 987.804642] env[63538]: } obtained for vApp import into resource pool (val){ [ 987.804642] env[63538]: value = "resgroup-9" [ 987.804642] env[63538]: _type = "ResourcePool" [ 987.804642] env[63538]: }. {{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 987.805097] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the lease: (returnval){ [ 987.805097] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521214d3-b5fa-75b9-acdd-d0c5789213cf" [ 987.805097] env[63538]: _type = "HttpNfcLease" [ 987.805097] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 987.812613] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 987.812613] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521214d3-b5fa-75b9-acdd-d0c5789213cf" [ 987.812613] env[63538]: _type = "HttpNfcLease" [ 987.812613] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 987.981678] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101372, 'name': ReconfigVM_Task, 'duration_secs': 0.787747} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.981961] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 209c5f46-9c63-4f55-bc75-bc2e4da989ac/209c5f46-9c63-4f55-bc75-bc2e4da989ac.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.982645] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26ef231d-1e4f-462e-b51c-53d137b8bb20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.990627] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 987.990627] env[63538]: value = "task-5101374" [ 987.990627] env[63538]: _type = "Task" [ 987.990627] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.001638] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101374, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.118483] env[63538]: DEBUG oslo_concurrency.lockutils [req-bdb8beb2-11a9-47fa-972c-18f6ebdbef5e req-61bafc6f-6a52-459e-900c-f02e88f52854 service nova] Releasing lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.164346] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 988.164550] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-818481b9-2299-4dcd-908a-c34f9274f347 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.171559] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 988.171559] env[63538]: value = "task-5101375" [ 988.171559] env[63538]: _type = "Task" [ 988.171559] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.181617] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101375, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.258699] env[63538]: DEBUG nova.scheduler.client.report [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 988.274403] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521bc8d8-ef4e-70fb-4b80-a5de3ef53720, 'name': SearchDatastore_Task, 'duration_secs': 0.010248} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.275288] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f101585f-8017-4f0f-bc6e-15ecb0d9917d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.282333] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 988.282333] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5264c799-b1a2-6ee5-6f57-82626f115199" [ 988.282333] env[63538]: _type = "Task" [ 988.282333] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.292994] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5264c799-b1a2-6ee5-6f57-82626f115199, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.313879] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 988.313879] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521214d3-b5fa-75b9-acdd-d0c5789213cf" [ 988.313879] env[63538]: _type = "HttpNfcLease" [ 988.313879] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 988.501017] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101374, 'name': Rename_Task, 'duration_secs': 0.146128} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.506368] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 988.506368] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f87d7eb-1a15-4e5d-9861-1128ec37029c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.513951] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 988.513951] env[63538]: value = "task-5101376" [ 988.513951] env[63538]: _type = "Task" [ 988.513951] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.523989] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101376, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.682751] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101375, 'name': PowerOffVM_Task, 'duration_secs': 0.230096} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.683166] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 988.683466] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 988.684284] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0698027-fcdf-4734-a97d-ef4f6c09c26a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.691809] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 988.692105] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f71614dc-3114-4781-aa54-9c6ec88d5fb0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.759071] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 988.759343] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 988.759560] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleting the datastore file [datastore2] 8ed0bd15-71fc-435e-9e4a-90b023ad8a79 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.759852] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7eaaf182-9674-41a7-9bce-5abb6f16fa98 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.767095] env[63538]: DEBUG oslo_concurrency.lockutils [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.962s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.770851] env[63538]: DEBUG oslo_concurrency.lockutils [None 
req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.855s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.771114] env[63538]: DEBUG nova.objects.instance [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lazy-loading 'resources' on Instance uuid 376ee3d9-e8b5-4f47-9622-b873126b492e {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.772615] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 988.772615] env[63538]: value = "task-5101378" [ 988.772615] env[63538]: _type = "Task" [ 988.772615] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.783597] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101378, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.795499] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5264c799-b1a2-6ee5-6f57-82626f115199, 'name': SearchDatastore_Task, 'duration_secs': 0.011348} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.796699] env[63538]: INFO nova.scheduler.client.report [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted allocations for instance cf72ac3d-4051-428a-b5bc-7f28accb13c0 [ 988.797922] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.797922] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] fb26fb32-a420-4667-850c-e32786edd8f2/fb26fb32-a420-4667-850c-e32786edd8f2.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 988.800856] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80899376-283d-436c-9c84-a10501cfb218 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.810894] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 988.810894] env[63538]: value = "task-5101379" [ 988.810894] env[63538]: _type = "Task" [ 988.810894] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.816253] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 988.816253] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521214d3-b5fa-75b9-acdd-d0c5789213cf" [ 988.816253] env[63538]: _type = "HttpNfcLease" [ 988.816253] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 988.817979] env[63538]: DEBUG oslo_vmware.rw_handles [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 988.817979] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521214d3-b5fa-75b9-acdd-d0c5789213cf" [ 988.817979] env[63538]: _type = "HttpNfcLease" [ 988.817979] env[63538]: }. 
{{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 988.817979] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c2779a-b4a5-4bf1-b0f2-b9bee5908836 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.824797] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101379, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.831667] env[63538]: DEBUG oslo_vmware.rw_handles [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523e84b9-d73d-1270-a723-e2b926a18490/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 988.831862] env[63538]: DEBUG oslo_vmware.rw_handles [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523e84b9-d73d-1270-a723-e2b926a18490/disk-0.vmdk. {{(pid=63538) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 988.898084] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-30f17f10-8f60-421d-8a93-351afbcda3d8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.029429] env[63538]: DEBUG oslo_vmware.api [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101376, 'name': PowerOnVM_Task, 'duration_secs': 0.475563} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.029952] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 989.030058] env[63538]: INFO nova.compute.manager [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Took 9.84 seconds to spawn the instance on the hypervisor. 
[ 989.030259] env[63538]: DEBUG nova.compute.manager [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 989.031299] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1610f32d-6822-423d-9c68-6b4d0e2cd13c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.064673] env[63538]: DEBUG oslo_concurrency.lockutils [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "e79a9eeb-a4c4-4613-bc43-4e40103addf9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.064987] env[63538]: DEBUG oslo_concurrency.lockutils [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "e79a9eeb-a4c4-4613-bc43-4e40103addf9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.065250] env[63538]: DEBUG oslo_concurrency.lockutils [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "e79a9eeb-a4c4-4613-bc43-4e40103addf9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.065529] env[63538]: DEBUG oslo_concurrency.lockutils [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "e79a9eeb-a4c4-4613-bc43-4e40103addf9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.065728] env[63538]: DEBUG oslo_concurrency.lockutils [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "e79a9eeb-a4c4-4613-bc43-4e40103addf9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.068580] env[63538]: INFO nova.compute.manager [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Terminating instance [ 989.070975] env[63538]: DEBUG nova.compute.manager [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 989.070975] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 989.071887] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c768fe71-ed2d-4337-abd6-e4e5d1904517 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.083151] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 989.083503] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-414f79f7-1803-4d3c-84ad-68533475afd3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.092594] env[63538]: DEBUG oslo_vmware.api [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 989.092594] env[63538]: value = "task-5101380" [ 989.092594] env[63538]: _type = "Task" [ 989.092594] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.104214] env[63538]: DEBUG oslo_vmware.api [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101380, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.295512] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101378, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18618} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.296136] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.296729] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 989.297120] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 989.309184] env[63538]: DEBUG oslo_concurrency.lockutils [None req-03e423d6-b75a-4ba4-a34b-ce107d629a6d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "cf72ac3d-4051-428a-b5bc-7f28accb13c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.300s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.332338] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101379, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.555542] env[63538]: INFO nova.compute.manager [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Took 29.75 seconds to build instance. [ 989.604522] env[63538]: DEBUG oslo_vmware.api [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101380, 'name': PowerOffVM_Task, 'duration_secs': 0.31345} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.604848] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 989.605080] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 989.605316] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67c5d78a-d222-4352-aa76-1142b8b3bf50 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.635739] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5f522d-c788-4085-8b46-3538f631d821 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.644187] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66fae7ad-156f-443d-bfd9-591596f9f3fe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.684027] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe16db2c-4ddb-44b3-97d7-410867c3c0ec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.691208] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 989.691998] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 989.691998] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Deleting the datastore file [datastore2] e79a9eeb-a4c4-4613-bc43-4e40103addf9 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 989.694585] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72a4020d-d951-4797-91ce-27eff3b31d78 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.701975] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ee227e-5eda-4f72-9e9f-bc749474caec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 989.711133] env[63538]: DEBUG oslo_vmware.api [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for the task: (returnval){ [ 989.711133] env[63538]: value = "task-5101382" [ 989.711133] env[63538]: _type = "Task" [ 989.711133] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.729574] env[63538]: DEBUG nova.compute.provider_tree [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.737899] env[63538]: DEBUG oslo_vmware.api [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101382, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.745519] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "f5d92749-04d6-4935-8dc6-afb692222df0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.745932] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "f5d92749-04d6-4935-8dc6-afb692222df0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.746236] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "f5d92749-04d6-4935-8dc6-afb692222df0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.746479] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "f5d92749-04d6-4935-8dc6-afb692222df0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.746717] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "f5d92749-04d6-4935-8dc6-afb692222df0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.751596] env[63538]: INFO nova.compute.manager [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc 
tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Terminating instance [ 989.753974] env[63538]: DEBUG nova.compute.manager [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 989.754226] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 989.755169] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9e5aca-fd00-4618-8816-23e55b73ec69 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.767377] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 989.769175] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aead412a-5caf-4a30-b2c1-713152d63640 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.778712] env[63538]: DEBUG oslo_vmware.api [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 989.778712] env[63538]: value = "task-5101383" [ 989.778712] env[63538]: _type = "Task" [ 989.778712] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.789434] env[63538]: DEBUG oslo_vmware.api [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101383, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.817200] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.817412] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.832246] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101379, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.564759} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.834807] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] fb26fb32-a420-4667-850c-e32786edd8f2/fb26fb32-a420-4667-850c-e32786edd8f2.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 989.835782] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 989.835782] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e621103-9890-46af-af04-1cbae4e390ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.845707] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 989.845707] env[63538]: value = "task-5101384" [ 989.845707] env[63538]: _type = "Task" [ 989.845707] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.859945] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101384, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.870949] env[63538]: DEBUG oslo_vmware.rw_handles [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Completed reading data from the image iterator. {{(pid=63538) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 989.871234] env[63538]: DEBUG oslo_vmware.rw_handles [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523e84b9-d73d-1270-a723-e2b926a18490/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 989.872513] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad50b85-9df1-4708-8b65-1a99d61c63b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.880900] env[63538]: DEBUG oslo_vmware.rw_handles [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523e84b9-d73d-1270-a723-e2b926a18490/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 989.881192] env[63538]: DEBUG oslo_vmware.rw_handles [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523e84b9-d73d-1270-a723-e2b926a18490/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 989.881545] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-62ee417b-a790-47fd-be25-e79b2e0a1d2a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.057732] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3f17349-69cb-4cf5-b301-630cb5931bea tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.264s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.225658] env[63538]: DEBUG oslo_vmware.api [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Task: {'id': task-5101382, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16176} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.225658] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.225658] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 990.225658] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 990.225658] env[63538]: INFO nova.compute.manager [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Took 1.15 seconds to destroy the instance on the hypervisor. [ 990.225658] env[63538]: DEBUG oslo.service.loopingcall [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 990.225658] env[63538]: DEBUG nova.compute.manager [-] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 990.225658] env[63538]: DEBUG nova.network.neutron [-] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 990.236331] env[63538]: DEBUG nova.scheduler.client.report [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 990.294270] env[63538]: DEBUG oslo_vmware.api [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101383, 'name': PowerOffVM_Task, 'duration_secs': 0.256844} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.297590] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 990.297937] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 990.298269] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f551475-a414-4ff7-9c80-ea6c2d5b394e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.324614] env[63538]: DEBUG nova.compute.manager [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 990.347823] env[63538]: DEBUG nova.virt.hardware [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 990.348243] env[63538]: DEBUG nova.virt.hardware [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 990.348325] env[63538]: DEBUG nova.virt.hardware [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 990.348506] env[63538]: DEBUG nova.virt.hardware [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 990.352017] env[63538]: DEBUG nova.virt.hardware [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 
tempest-ServerActionsTestJSON-1096356934-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 990.352017] env[63538]: DEBUG nova.virt.hardware [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 990.352017] env[63538]: DEBUG nova.virt.hardware [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 990.352017] env[63538]: DEBUG nova.virt.hardware [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 990.352017] env[63538]: DEBUG nova.virt.hardware [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 990.352017] env[63538]: DEBUG nova.virt.hardware [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 990.352017] env[63538]: DEBUG nova.virt.hardware [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 990.352017] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a616c0e-0814-4999-9f1c-28bfae607cf8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.363307] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101384, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081611} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.367611] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 990.368077] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 990.368301] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 990.368540] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleting the datastore file [datastore1] f5d92749-04d6-4935-8dc6-afb692222df0 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.369375] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6585cfab-b7aa-4d9f-a508-b1006913c86b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.373561] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a356f255-467a-4ce3-85d0-7dd7f00459a6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.378923] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11143185-1df3-4718-91de-2aea38ac4d0d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.385350] env[63538]: DEBUG oslo_vmware.api [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 990.385350] env[63538]: value = "task-5101386" [ 990.385350] env[63538]: _type = "Task" [ 990.385350] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.403266] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:ce:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f39e3b37-7906-4bbc-820e-ceac74e4d827', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 990.413031] env[63538]: DEBUG oslo.service.loopingcall [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 990.419652] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] fb26fb32-a420-4667-850c-e32786edd8f2/fb26fb32-a420-4667-850c-e32786edd8f2.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 990.423546] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 990.427019] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-351afdd3-8a78-4eea-bca9-cc01a03c77c0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.438603] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6f69502-0710-403f-b167-e73efe294f8c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.460565] env[63538]: DEBUG oslo_vmware.api [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.463183] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 990.463183] env[63538]: value = "task-5101387" [ 990.463183] env[63538]: _type = "Task" [ 990.463183] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.463455] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 990.463455] env[63538]: value = "task-5101388" [ 990.463455] env[63538]: _type = "Task" [ 990.463455] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.475396] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101388, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.479222] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101387, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.666018] env[63538]: DEBUG oslo_vmware.rw_handles [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523e84b9-d73d-1270-a723-e2b926a18490/disk-0.vmdk. {{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 990.666018] env[63538]: INFO nova.virt.vmwareapi.images [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Downloaded image file data b299d3c5-b48d-4f1f-a911-692e6a986d0c [ 990.667943] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730a26ee-a912-45a2-9875-bc201f565c5f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.687220] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a57720aa-7927-47b2-9d97-341b43612abe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.716083] env[63538]: INFO nova.virt.vmwareapi.images [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] The imported VM was unregistered [ 990.718488] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Caching image {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 990.718731] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Creating directory with path [datastore2] devstack-image-cache_base/b299d3c5-b48d-4f1f-a911-692e6a986d0c {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 990.719076] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7306815b-cd1f-4e4e-8557-bdd8427d0ac4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.733900] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Created directory with path [datastore2] devstack-image-cache_base/b299d3c5-b48d-4f1f-a911-692e6a986d0c {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
990.734465] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_aa074a5e-caf6-4dbd-a8d5-a33508687552/OSTACK_IMG_aa074a5e-caf6-4dbd-a8d5-a33508687552.vmdk to [datastore2] devstack-image-cache_base/b299d3c5-b48d-4f1f-a911-692e6a986d0c/b299d3c5-b48d-4f1f-a911-692e6a986d0c.vmdk. {{(pid=63538) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 990.734465] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-ee28e378-efde-473b-8575-a00f0e412350 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.743977] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.973s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.749209] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 990.749209] env[63538]: value = "task-5101390" [ 990.749209] env[63538]: _type = "Task" [ 990.749209] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.749480] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.253s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.749520] env[63538]: DEBUG nova.objects.instance [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lazy-loading 'resources' on Instance uuid 0df15328-aebd-44c5-9c78-ee05f188ad95 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 990.763666] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101390, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.774738] env[63538]: INFO nova.scheduler.client.report [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Deleted allocations for instance 376ee3d9-e8b5-4f47-9622-b873126b492e [ 990.781314] env[63538]: DEBUG nova.compute.manager [req-6e5e5400-1cee-459a-bf58-9f250b999c65 req-ac744901-0440-48c1-a87f-a9e7df452f4d service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Received event network-changed-8240a40a-4486-4213-ac28-8eee15d652a8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 990.781553] env[63538]: DEBUG nova.compute.manager [req-6e5e5400-1cee-459a-bf58-9f250b999c65 req-ac744901-0440-48c1-a87f-a9e7df452f4d service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Refreshing instance network info cache due to event network-changed-8240a40a-4486-4213-ac28-8eee15d652a8. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 990.781821] env[63538]: DEBUG oslo_concurrency.lockutils [req-6e5e5400-1cee-459a-bf58-9f250b999c65 req-ac744901-0440-48c1-a87f-a9e7df452f4d service nova] Acquiring lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.781920] env[63538]: DEBUG oslo_concurrency.lockutils [req-6e5e5400-1cee-459a-bf58-9f250b999c65 req-ac744901-0440-48c1-a87f-a9e7df452f4d service nova] Acquired lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.782157] env[63538]: DEBUG nova.network.neutron [req-6e5e5400-1cee-459a-bf58-9f250b999c65 req-ac744901-0440-48c1-a87f-a9e7df452f4d service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Refreshing network info cache for port 8240a40a-4486-4213-ac28-8eee15d652a8 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 990.863446] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.915756] env[63538]: DEBUG oslo_vmware.api [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197881} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.915939] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.916198] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 990.916457] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 990.916701] env[63538]: INFO nova.compute.manager [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Took 1.16 seconds to destroy the instance on the hypervisor. [ 990.917017] env[63538]: DEBUG oslo.service.loopingcall [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 990.917277] env[63538]: DEBUG nova.compute.manager [-] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 990.917430] env[63538]: DEBUG nova.network.neutron [-] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 990.978654] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101387, 'name': ReconfigVM_Task, 'duration_secs': 0.437732} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.983471] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Reconfigured VM instance instance-00000057 to attach disk [datastore2] fb26fb32-a420-4667-850c-e32786edd8f2/fb26fb32-a420-4667-850c-e32786edd8f2.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.983471] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101388, 'name': CreateVM_Task, 'duration_secs': 0.400363} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.983653] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9972344-229d-46f0-975c-4d7b13aacd48 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.985464] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 990.986227] env[63538]: DEBUG oslo_concurrency.lockutils [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.986451] env[63538]: DEBUG oslo_concurrency.lockutils [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.987530] env[63538]: DEBUG oslo_concurrency.lockutils [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 990.988285] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8017755-88b5-44be-b368-67de4c32e8d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.994824] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 990.994824] env[63538]: value = "task-5101391" [ 990.994824] env[63538]: _type = "Task" [ 990.994824] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.000171] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 991.000171] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522e8de7-1a90-a06e-89d2-aa02235c1e07" [ 991.000171] env[63538]: _type = "Task" [ 991.000171] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.009880] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101391, 'name': Rename_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.014345] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522e8de7-1a90-a06e-89d2-aa02235c1e07, 'name': SearchDatastore_Task, 'duration_secs': 0.01095} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.014639] env[63538]: DEBUG oslo_concurrency.lockutils [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.014924] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 991.015497] env[63538]: DEBUG oslo_concurrency.lockutils [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.015626] env[63538]: DEBUG oslo_concurrency.lockutils [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.015979] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.016271] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-998c4489-9561-4b21-ab0a-194b5e113b0f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.037507] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.037757] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 991.038475] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ed681c9-b425-4568-9331-a6021a37e18c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.047851] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 991.047851] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c02bc-8947-4c44-209e-aaa49d85c384" [ 991.047851] env[63538]: _type = "Task" [ 991.047851] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.059357] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c02bc-8947-4c44-209e-aaa49d85c384, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.254488] env[63538]: DEBUG nova.objects.instance [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lazy-loading 'numa_topology' on Instance uuid 0df15328-aebd-44c5-9c78-ee05f188ad95 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 991.269167] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101390, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.291968] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ccebaf1f-4fff-487b-b447-0c1367946e28 tempest-ListImageFiltersTestJSON-316064239 tempest-ListImageFiltersTestJSON-316064239-project-member] Lock "376ee3d9-e8b5-4f47-9622-b873126b492e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.186s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.351140] env[63538]: DEBUG nova.network.neutron [-] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.515656] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101391, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.563958] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c02bc-8947-4c44-209e-aaa49d85c384, 'name': SearchDatastore_Task, 'duration_secs': 0.08132} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.565111] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0612a989-4e24-4d2a-8b88-dad2da1d9acd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.572824] env[63538]: DEBUG nova.network.neutron [req-6e5e5400-1cee-459a-bf58-9f250b999c65 req-ac744901-0440-48c1-a87f-a9e7df452f4d service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updated VIF entry in instance network info cache for port 8240a40a-4486-4213-ac28-8eee15d652a8. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 991.573348] env[63538]: DEBUG nova.network.neutron [req-6e5e5400-1cee-459a-bf58-9f250b999c65 req-ac744901-0440-48c1-a87f-a9e7df452f4d service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updating instance_info_cache with network_info: [{"id": "8240a40a-4486-4213-ac28-8eee15d652a8", "address": "fa:16:3e:41:82:69", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8240a40a-44", "ovs_interfaceid": "8240a40a-4486-4213-ac28-8eee15d652a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.576187] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 991.576187] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aacdb3-c626-2c78-2032-7c659dc97055" [ 991.576187] env[63538]: _type = "Task" [ 991.576187] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.590968] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aacdb3-c626-2c78-2032-7c659dc97055, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.761942] env[63538]: DEBUG nova.objects.base [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Object Instance<0df15328-aebd-44c5-9c78-ee05f188ad95> lazy-loaded attributes: resources,numa_topology {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 991.768419] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101390, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.799319] env[63538]: DEBUG nova.network.neutron [-] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.858034] env[63538]: INFO nova.compute.manager [-] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Took 1.63 seconds to deallocate network for instance. [ 992.009457] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101391, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.077705] env[63538]: DEBUG oslo_concurrency.lockutils [req-6e5e5400-1cee-459a-bf58-9f250b999c65 req-ac744901-0440-48c1-a87f-a9e7df452f4d service nova] Releasing lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.095068] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aacdb3-c626-2c78-2032-7c659dc97055, 'name': SearchDatastore_Task, 'duration_secs': 0.097297} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.095068] env[63538]: DEBUG oslo_concurrency.lockutils [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.095246] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 8ed0bd15-71fc-435e-9e4a-90b023ad8a79/8ed0bd15-71fc-435e-9e4a-90b023ad8a79.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 992.095651] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30d9a81a-174d-4731-ae35-901a92cc7db6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.108973] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 992.108973] env[63538]: value = "task-5101392" [ 992.108973] env[63538]: _type = "Task" [ 992.108973] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.123361] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101392, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.127759] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efff275b-cfc1-4211-89d9-7e34167878a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.137200] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620ab5a3-3836-43c8-98e8-d7621816155c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.179929] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23066a79-b1fb-42a5-bbf2-338c4ad111b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.191590] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d0e59d-127d-44bf-a9e5-339351c58f02 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.216425] env[63538]: DEBUG nova.compute.provider_tree [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.271718] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101390, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.306044] env[63538]: INFO nova.compute.manager [-] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Took 1.39 seconds to deallocate network for instance. [ 992.363818] env[63538]: DEBUG oslo_concurrency.lockutils [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.510660] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101391, 'name': Rename_Task, 'duration_secs': 1.075162} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.511581] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 992.511581] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ea5374c-5436-4aa4-b68e-0244dab30798 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.522176] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 992.522176] env[63538]: value = "task-5101393" [ 992.522176] env[63538]: _type = "Task" [ 992.522176] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.534345] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101393, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.620738] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101392, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.719281] env[63538]: DEBUG nova.scheduler.client.report [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 992.769542] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101390, 'name': MoveVirtualDisk_Task} progress is 77%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.810673] env[63538]: DEBUG nova.compute.manager [req-c71698ec-00b1-4f68-adc5-483e9d61e836 req-fb3c1e6e-4212-4392-b96b-941cd32699d7 service nova] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Received event network-vif-deleted-2e8868e8-3746-43e8-906c-20e0cd0e7336 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 992.810768] env[63538]: DEBUG nova.compute.manager [req-c71698ec-00b1-4f68-adc5-483e9d61e836 req-fb3c1e6e-4212-4392-b96b-941cd32699d7 service nova] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Received event network-vif-deleted-733479ad-a05b-44cf-a265-b766fea3b2ba {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 992.814272] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.038927] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101393, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.123026] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101392, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.140754] env[63538]: DEBUG oslo_concurrency.lockutils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquiring lock "148790a7-0a35-4d26-ae9f-6f954a161c88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.141017] env[63538]: DEBUG oslo_concurrency.lockutils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Lock "148790a7-0a35-4d26-ae9f-6f954a161c88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.225328] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.476s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.228416] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.485s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.228618] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.228827] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63538) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 993.229090] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.485s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.230659] env[63538]: INFO nova.compute.claims [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 993.235167] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8089bc79-3e37-4caa-b203-5cc755b583e3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.246401] env[63538]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a929f3-555c-4f39-ab9e-8774b3d5d566 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.271264] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7367fb7b-e2f5-4f8f-b898-b4d18a0cd979 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.283952] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d47c53d-0a92-4e76-ac35-45ac77bce215 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.289026] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101390, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.319563] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178207MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=63538) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 993.319748] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.536466] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101393, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.621973] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101392, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.645051] env[63538]: DEBUG nova.compute.manager [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 993.743465] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f962ad5c-351f-433c-a70f-64011ff17d07 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 46.023s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.745028] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 15.112s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.745271] env[63538]: INFO nova.compute.manager [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Unshelving [ 993.779196] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101390, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.68599} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.779537] env[63538]: INFO nova.virt.vmwareapi.ds_util [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_aa074a5e-caf6-4dbd-a8d5-a33508687552/OSTACK_IMG_aa074a5e-caf6-4dbd-a8d5-a33508687552.vmdk to [datastore2] devstack-image-cache_base/b299d3c5-b48d-4f1f-a911-692e6a986d0c/b299d3c5-b48d-4f1f-a911-692e6a986d0c.vmdk. [ 993.779812] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Cleaning up location [datastore2] OSTACK_IMG_aa074a5e-caf6-4dbd-a8d5-a33508687552 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 993.780084] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_aa074a5e-caf6-4dbd-a8d5-a33508687552 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 993.780451] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42da5221-2b9a-45db-af27-5d261c837bf1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.786995] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 993.786995] env[63538]: value = "task-5101394" [ 993.786995] env[63538]: _type = "Task" [ 993.786995] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.796143] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101394, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.035787] env[63538]: DEBUG oslo_vmware.api [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101393, 'name': PowerOnVM_Task, 'duration_secs': 1.4509} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.036201] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 994.036471] env[63538]: INFO nova.compute.manager [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Took 9.78 seconds to spawn the instance on the hypervisor. [ 994.036749] env[63538]: DEBUG nova.compute.manager [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 994.037872] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c4686d-e561-438b-b38a-6164f0823f5f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.121060] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101392, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.659257} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.121060] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 8ed0bd15-71fc-435e-9e4a-90b023ad8a79/8ed0bd15-71fc-435e-9e4a-90b023ad8a79.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 994.121341] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 994.121453] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29be26e8-1aea-4f9c-bf20-db57f2a3c1d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.129019] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 994.129019] env[63538]: value = "task-5101395" [ 994.129019] env[63538]: _type = "Task" [ 994.129019] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.138201] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101395, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.166566] env[63538]: DEBUG oslo_concurrency.lockutils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.301722] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101394, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073116} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.302008] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 994.302198] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b299d3c5-b48d-4f1f-a911-692e6a986d0c/b299d3c5-b48d-4f1f-a911-692e6a986d0c.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.302460] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b299d3c5-b48d-4f1f-a911-692e6a986d0c/b299d3c5-b48d-4f1f-a911-692e6a986d0c.vmdk to [datastore2] 0339c969-ad97-47b1-8fab-ee595738d9df/0339c969-ad97-47b1-8fab-ee595738d9df.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 994.302740] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed77c894-9b6c-405d-beef-c52253236bd6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.312675] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 994.312675] env[63538]: value = "task-5101396" [ 994.312675] env[63538]: _type = "Task" [ 994.312675] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.322689] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101396, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.522978] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e7250c-667f-48f6-85f5-a5ff2cf850ee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.531170] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d04c2c-45ec-4c6a-8bc7-ede45e6e3722 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.569678] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825858be-85ad-4b31-a5b6-0e5aaacbcc6f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.572964] env[63538]: INFO nova.compute.manager [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Took 31.93 seconds to build instance. 
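The recurring "Task: {'id': task-..., 'name': ...} progress is N%" entries in this trace come from the compute driver polling a vCenter task at a fixed interval until it reports success or failure, with oslo.service's looping call driving the poll loop. The sketch below illustrates only that polling pattern, not the driver's actual implementation: it assumes oslo.service (and its eventlet backend) is installed, and fetch_task_info is a hypothetical stand-in for the real PropertyCollector query against the task object.

    # Illustrative sketch of the fixed-interval task polling pattern seen above.
    from oslo_service import loopingcall


    class TaskFailed(Exception):
        """Raised when the polled task reports an error state."""


    def fetch_task_info(task_ref):
        # Hypothetical helper: a real driver would retrieve the TaskInfo
        # object (state, progress, result, error) from vCenter here.
        return {"state": "success", "progress": 100, "result": None}


    def wait_for_task(task_ref, poll_interval=0.5):
        """Poll task_ref until it finishes, reporting progress on each tick."""

        def _poll():
            info = fetch_task_info(task_ref)
            if info["state"] in ("queued", "running"):
                # Corresponds to the "... progress is N%" DEBUG lines above.
                print("Task: %s progress is %s%%" % (task_ref, info.get("progress", 0)))
                return  # keep polling on the next interval
            if info["state"] == "success":
                # Stops the looping call; .wait() below returns this value.
                raise loopingcall.LoopingCallDone(info["result"])
            raise TaskFailed("task %s did not complete: %s" % (task_ref, info))

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=poll_interval).wait()


    if __name__ == "__main__":
        wait_for_task("task-0000000")

Raising LoopingCallDone from the polled function is what ends the loop and hands the task result back to the caller of wait(); any other exception stops the loop and propagates, which is roughly how a failed vCenter task surfaces to the compute manager.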
[ 994.580900] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800938da-2462-4e97-b761-a56568438ecd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.598083] env[63538]: DEBUG nova.compute.provider_tree [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.640591] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101395, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06817} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.640915] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 994.641811] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e793fe4d-ba1d-450f-99e5-debef9f2da8a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.665879] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 8ed0bd15-71fc-435e-9e4a-90b023ad8a79/8ed0bd15-71fc-435e-9e4a-90b023ad8a79.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 994.666306] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efb2edab-5f85-4d87-8431-7ae089e34792 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.687590] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 994.687590] env[63538]: value = "task-5101397" [ 994.687590] env[63538]: _type = "Task" [ 994.687590] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.698420] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101397, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.756303] env[63538]: DEBUG nova.compute.utils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 994.823654] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101396, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.075476] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc3d0998-e056-431f-8fb6-7beea4c90bc8 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "fb26fb32-a420-4667-850c-e32786edd8f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.441s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.102806] env[63538]: DEBUG nova.scheduler.client.report [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 995.167416] env[63538]: DEBUG nova.compute.manager [req-f7464481-d0d4-47d6-99bc-c65699151f82 req-c6bd2db1-b242-4005-9197-66c768690476 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Received event network-changed-5250918c-5112-49ad-b1d3-f73c2d534637 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 995.167643] env[63538]: DEBUG nova.compute.manager [req-f7464481-d0d4-47d6-99bc-c65699151f82 req-c6bd2db1-b242-4005-9197-66c768690476 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Refreshing instance network info cache due to event network-changed-5250918c-5112-49ad-b1d3-f73c2d534637. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 995.167840] env[63538]: DEBUG oslo_concurrency.lockutils [req-f7464481-d0d4-47d6-99bc-c65699151f82 req-c6bd2db1-b242-4005-9197-66c768690476 service nova] Acquiring lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.168430] env[63538]: DEBUG oslo_concurrency.lockutils [req-f7464481-d0d4-47d6-99bc-c65699151f82 req-c6bd2db1-b242-4005-9197-66c768690476 service nova] Acquired lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.169150] env[63538]: DEBUG nova.network.neutron [req-f7464481-d0d4-47d6-99bc-c65699151f82 req-c6bd2db1-b242-4005-9197-66c768690476 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Refreshing network info cache for port 5250918c-5112-49ad-b1d3-f73c2d534637 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 995.200921] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101397, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.259723] env[63538]: INFO nova.virt.block_device [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Booting with volume dc4092b5-f968-4e95-b286-e9901b2a6c30 at /dev/sdb [ 995.300188] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4d865fc5-266c-4d69-a86a-3fc1aae61a39 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.313641] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf1478a-7505-4484-9163-af44c9932dce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.337351] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101396, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.359822] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e851ec7-241d-433a-abce-c1c0ea397857 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.371858] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0a2359-1afb-40d9-a090-b7f6bedb82dd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.414156] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed064cfa-05a3-418d-bc27-f03ec333e51f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.425328] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6054278d-91a2-46d5-a6ce-515ba37eae02 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.443401] env[63538]: DEBUG nova.virt.block_device [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Updating existing volume attachment record: e267fc57-cfe1-4c88-8ad5-689adcbc004d {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 995.608652] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.609252] env[63538]: DEBUG nova.compute.manager [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 995.612157] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.785s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.614865] env[63538]: INFO nova.compute.claims [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 995.726926] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101397, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.841962] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101396, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.954618] env[63538]: DEBUG nova.network.neutron [req-f7464481-d0d4-47d6-99bc-c65699151f82 req-c6bd2db1-b242-4005-9197-66c768690476 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Updated VIF entry in instance network info cache for port 5250918c-5112-49ad-b1d3-f73c2d534637. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 995.955073] env[63538]: DEBUG nova.network.neutron [req-f7464481-d0d4-47d6-99bc-c65699151f82 req-c6bd2db1-b242-4005-9197-66c768690476 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Updating instance_info_cache with network_info: [{"id": "5250918c-5112-49ad-b1d3-f73c2d534637", "address": "fa:16:3e:6d:44:6f", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5250918c-51", "ovs_interfaceid": "5250918c-5112-49ad-b1d3-f73c2d534637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.119452] env[63538]: DEBUG nova.compute.utils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 996.123749] env[63538]: DEBUG nova.compute.manager [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 996.124253] env[63538]: DEBUG nova.network.neutron [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 996.180186] env[63538]: DEBUG nova.policy [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '034ec4f48d8e40c9b99e83b08b9c0c5a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9427981aac124f6aa0c4d8d45b0ae917', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 996.203548] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101397, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.342875] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101396, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.461906] env[63538]: DEBUG oslo_concurrency.lockutils [req-f7464481-d0d4-47d6-99bc-c65699151f82 req-c6bd2db1-b242-4005-9197-66c768690476 service nova] Releasing lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.627280] env[63538]: DEBUG nova.compute.manager [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 996.684880] env[63538]: DEBUG nova.network.neutron [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Successfully created port: c9561ca9-cb68-4037-807e-9f89307cb528 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 996.712415] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101397, 'name': ReconfigVM_Task, 'duration_secs': 1.685899} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.712577] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 8ed0bd15-71fc-435e-9e4a-90b023ad8a79/8ed0bd15-71fc-435e-9e4a-90b023ad8a79.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.713294] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ee48fe3-b7d8-4cf2-afb3-98db9910d3ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.730461] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 996.730461] env[63538]: value = "task-5101401" [ 996.730461] env[63538]: _type = "Task" [ 996.730461] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.745029] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101401, 'name': Rename_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.845049] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101396, 'name': CopyVirtualDisk_Task} progress is 80%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.007607] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871f25b2-6ecb-40d3-99ee-a42c8812808e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.017141] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31ed2e0-f21e-440c-86b4-517a3d2ec20c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.055052] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808fe659-1d98-432a-9f5d-27b099ac1456 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.065610] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e17135-ec4b-4c81-83af-07d7a2c2d296 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.082677] env[63538]: DEBUG nova.compute.provider_tree [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.248040] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101401, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.342186] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101396, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.985247} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.342480] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b299d3c5-b48d-4f1f-a911-692e6a986d0c/b299d3c5-b48d-4f1f-a911-692e6a986d0c.vmdk to [datastore2] 0339c969-ad97-47b1-8fab-ee595738d9df/0339c969-ad97-47b1-8fab-ee595738d9df.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 997.343340] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0057976a-1fe4-4ede-841d-538548eb7d0f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.366927] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 0339c969-ad97-47b1-8fab-ee595738d9df/0339c969-ad97-47b1-8fab-ee595738d9df.vmdk or device None with type streamOptimized {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 997.367422] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-127e9bc4-068b-4dc1-9bad-d99dd6c979c3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.394634] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 997.394634] env[63538]: value = "task-5101402" [ 997.394634] env[63538]: _type = "Task" [ 997.394634] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.405994] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101402, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.585987] env[63538]: DEBUG nova.scheduler.client.report [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 997.637896] env[63538]: DEBUG nova.compute.manager [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Start spawning the instance on the hypervisor. 
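Editor's note: the inventory payload reported above for provider f65218a4-1d3d-476a-9093-01cae92c8635 is what the scheduler places against. A minimal sketch of the capacity arithmetic, using the exact figures from the log; the helper name and standalone-script form are illustrative, not Nova code, and the formula (total - reserved) * allocation_ratio is the usual Placement-style derivation (max_unit additionally caps any single allocation, e.g. 16 VCPU here).

    # Illustrative only: schedulable capacity from the inventory shown above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def schedulable_capacity(inv):
        # capacity = (total - reserved) * allocation_ratio, per resource class
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(schedulable_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 200.0}
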
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 997.667703] env[63538]: DEBUG nova.virt.hardware [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 997.668463] env[63538]: DEBUG nova.virt.hardware [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 997.668463] env[63538]: DEBUG nova.virt.hardware [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.668463] env[63538]: DEBUG nova.virt.hardware [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 997.668610] env[63538]: DEBUG nova.virt.hardware [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.669320] env[63538]: DEBUG nova.virt.hardware [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 997.669320] env[63538]: DEBUG nova.virt.hardware [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 997.669320] env[63538]: DEBUG nova.virt.hardware [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 997.669320] env[63538]: DEBUG nova.virt.hardware [None 
req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 997.669540] env[63538]: DEBUG nova.virt.hardware [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 997.669575] env[63538]: DEBUG nova.virt.hardware [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 997.670528] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4752ebe7-46d8-459a-9859-5fa1ed665871 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.679736] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4f71fe-0093-4ad3-870e-8ca1020a77ff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.747860] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101401, 'name': Rename_Task, 'duration_secs': 0.653685} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.750197] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 997.750197] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba5d7a26-08cf-47f6-8277-d488f33c7616 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.757255] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 997.757255] env[63538]: value = "task-5101403" [ 997.757255] env[63538]: _type = "Task" [ 997.757255] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.767439] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101403, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.905270] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101402, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.092796] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.093450] env[63538]: DEBUG nova.compute.manager [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 998.096401] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.894s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.096777] env[63538]: DEBUG nova.objects.instance [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lazy-loading 'resources' on Instance uuid 6bc30d96-8056-421c-875b-c24488e5f595 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.268601] env[63538]: DEBUG nova.compute.manager [req-f96c4c3d-b921-414c-9765-d3c0ecf536e6 req-4e99484d-adc5-4a08-b7d7-22bd07518b3e service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Received event network-vif-plugged-c9561ca9-cb68-4037-807e-9f89307cb528 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 998.268842] env[63538]: DEBUG oslo_concurrency.lockutils [req-f96c4c3d-b921-414c-9765-d3c0ecf536e6 req-4e99484d-adc5-4a08-b7d7-22bd07518b3e service nova] Acquiring lock "431a67e6-b90d-4930-9a86-7c49d1022ddc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.273217] env[63538]: DEBUG oslo_concurrency.lockutils [req-f96c4c3d-b921-414c-9765-d3c0ecf536e6 req-4e99484d-adc5-4a08-b7d7-22bd07518b3e service nova] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.273288] env[63538]: DEBUG oslo_concurrency.lockutils [req-f96c4c3d-b921-414c-9765-d3c0ecf536e6 req-4e99484d-adc5-4a08-b7d7-22bd07518b3e service nova] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.004s 
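Editor's note: the "acquired ... waited Ns" / "released ... held Ns" DEBUG lines above come from oslo.concurrency's lockutils wrapper (lockutils.py inner) around named locks such as "compute_resources" and the per-instance "-events" locks. A minimal sketch of the same primitives, with an illustrative body rather than Nova's actual resource-tracker or event code:

    from oslo_concurrency import lockutils

    # Decorator form: the wrapper logs the waited/held timings seen above.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # placeholder; Nova's ResourceTracker performs the claim here

    # Context-manager form for an ad-hoc critical section:
    with lockutils.lock('431a67e6-b90d-4930-9a86-7c49d1022ddc-events'):
        pass  # pop/clear instance events while holding the per-instance lock
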
{{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.273566] env[63538]: DEBUG nova.compute.manager [req-f96c4c3d-b921-414c-9765-d3c0ecf536e6 req-4e99484d-adc5-4a08-b7d7-22bd07518b3e service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] No waiting events found dispatching network-vif-plugged-c9561ca9-cb68-4037-807e-9f89307cb528 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 998.273808] env[63538]: WARNING nova.compute.manager [req-f96c4c3d-b921-414c-9765-d3c0ecf536e6 req-4e99484d-adc5-4a08-b7d7-22bd07518b3e service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Received unexpected event network-vif-plugged-c9561ca9-cb68-4037-807e-9f89307cb528 for instance with vm_state building and task_state spawning. [ 998.281354] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101403, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.350130] env[63538]: DEBUG nova.network.neutron [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Successfully updated port: c9561ca9-cb68-4037-807e-9f89307cb528 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 998.406334] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101402, 'name': ReconfigVM_Task, 'duration_secs': 0.7407} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.406717] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 0339c969-ad97-47b1-8fab-ee595738d9df/0339c969-ad97-47b1-8fab-ee595738d9df.vmdk or device None with type streamOptimized {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 998.407396] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8da63faa-97bd-4cd8-bb40-114182cc0c14 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.414818] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 998.414818] env[63538]: value = "task-5101405" [ 998.414818] env[63538]: _type = "Task" [ 998.414818] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.426099] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101405, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.600307] env[63538]: DEBUG nova.compute.utils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 998.605151] env[63538]: DEBUG nova.compute.manager [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 998.605340] env[63538]: DEBUG nova.network.neutron [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 998.674616] env[63538]: DEBUG nova.policy [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ad1bddeca5346dea39d23339e09db3d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a06b7cc1ab24ba584bbe970e4fc5e81', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 998.769210] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101403, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.853952] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "refresh_cache-431a67e6-b90d-4930-9a86-7c49d1022ddc" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.854083] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquired lock "refresh_cache-431a67e6-b90d-4930-9a86-7c49d1022ddc" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.854169] env[63538]: DEBUG nova.network.neutron [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 998.929663] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101405, 'name': Rename_Task, 'duration_secs': 0.336199} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.930051] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 998.930230] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72bf6a8c-a275-4490-aafa-8806dce71317 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.937283] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 998.937283] env[63538]: value = "task-5101406" [ 998.937283] env[63538]: _type = "Task" [ 998.937283] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.946467] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101406, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.948325] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97540545-7029-4abe-984f-eecf0c913670 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.955740] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fff34d9-4625-4fef-82f7-3ea384ed1c88 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.989176] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32a5aa8-b087-4a5c-a214-174187c94e78 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.999665] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57683cb-ae07-40db-9ae8-f9bc607ff922 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.016966] env[63538]: DEBUG nova.compute.provider_tree [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.046417] env[63538]: DEBUG nova.network.neutron [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Successfully created port: 2bf5c751-02ce-4e9e-8e98-68c3505b8aec {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 999.105678] env[63538]: DEBUG nova.compute.manager [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 999.270271] env[63538]: DEBUG oslo_vmware.api [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101403, 'name': PowerOnVM_Task, 'duration_secs': 1.284584} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.270462] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 999.270662] env[63538]: DEBUG nova.compute.manager [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 999.271541] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4259da-5f12-4075-b7b5-64cbdf10253c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.396737] env[63538]: DEBUG nova.network.neutron [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 999.449928] env[63538]: DEBUG oslo_vmware.api [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101406, 'name': PowerOnVM_Task, 'duration_secs': 0.478501} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.450233] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 999.450437] env[63538]: INFO nova.compute.manager [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Took 17.73 seconds to spawn the instance on the hypervisor. 
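Editor's note: the ReconfigVM_Task, Rename_Task and PowerOnVM_Task records above all follow the same oslo.vmware pattern: invoke a vCenter task method, then block in wait_for_task while the library polls progress (the recurring "progress is N%" lines). A minimal sketch under the assumption of placeholder credentials and an already-known vm_ref; error handling and the real managed-object lookup are omitted:

    from oslo_vmware import api as vmware_api

    # Sketch of the invoke-then-poll pattern; host, credentials, retry/poll
    # values and vm_ref are placeholders, not values from this deployment.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = None  # placeholder: looked up via a PropertyCollector query in real code
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task_ref)  # polls and logs progress until done
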
[ 999.450650] env[63538]: DEBUG nova.compute.manager [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 999.451492] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcaa8ee-020f-4a80-baf9-5c54c56cdbe2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.520986] env[63538]: DEBUG nova.scheduler.client.report [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 999.542029] env[63538]: DEBUG nova.network.neutron [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Updating instance_info_cache with network_info: [{"id": "c9561ca9-cb68-4037-807e-9f89307cb528", "address": "fa:16:3e:76:0e:f9", "network": {"id": "955bc6e2-cea0-4cf9-80fb-521ae0e565f0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-726504029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9427981aac124f6aa0c4d8d45b0ae917", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c297fe21-cd0b-4226-813b-a65d2358d034", "external-id": "nsx-vlan-transportzone-98", "segmentation_id": 98, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9561ca9-cb", "ovs_interfaceid": "c9561ca9-cb68-4037-807e-9f89307cb528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.792943] env[63538]: DEBUG oslo_concurrency.lockutils [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.971304] env[63538]: INFO nova.compute.manager [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 
0339c969-ad97-47b1-8fab-ee595738d9df] Took 38.75 seconds to build instance. [ 1000.026379] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.930s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.028926] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.377s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.030591] env[63538]: INFO nova.compute.claims [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1000.044830] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Releasing lock "refresh_cache-431a67e6-b90d-4930-9a86-7c49d1022ddc" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.045213] env[63538]: DEBUG nova.compute.manager [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Instance network_info: |[{"id": "c9561ca9-cb68-4037-807e-9f89307cb528", "address": "fa:16:3e:76:0e:f9", "network": {"id": "955bc6e2-cea0-4cf9-80fb-521ae0e565f0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-726504029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9427981aac124f6aa0c4d8d45b0ae917", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c297fe21-cd0b-4226-813b-a65d2358d034", "external-id": "nsx-vlan-transportzone-98", "segmentation_id": 98, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9561ca9-cb", "ovs_interfaceid": "c9561ca9-cb68-4037-807e-9f89307cb528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1000.045741] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:0e:f9', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': 'c297fe21-cd0b-4226-813b-a65d2358d034', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9561ca9-cb68-4037-807e-9f89307cb528', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1000.053952] env[63538]: DEBUG oslo.service.loopingcall [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1000.055412] env[63538]: INFO nova.scheduler.client.report [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Deleted allocations for instance 6bc30d96-8056-421c-875b-c24488e5f595 [ 1000.058110] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1000.059799] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b913f5e-d1e1-4226-b1c2-892d9e940633 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.082336] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1000.082336] env[63538]: value = "task-5101407" [ 1000.082336] env[63538]: _type = "Task" [ 1000.082336] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.092356] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101407, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.117019] env[63538]: DEBUG nova.compute.manager [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1000.149211] env[63538]: DEBUG nova.virt.hardware [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1000.149527] env[63538]: DEBUG nova.virt.hardware [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1000.151072] env[63538]: DEBUG nova.virt.hardware [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1000.151072] env[63538]: DEBUG nova.virt.hardware [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1000.151072] env[63538]: DEBUG nova.virt.hardware [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1000.151072] env[63538]: DEBUG nova.virt.hardware [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1000.151072] env[63538]: DEBUG nova.virt.hardware [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1000.151072] env[63538]: DEBUG nova.virt.hardware [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1000.151072] env[63538]: DEBUG 
nova.virt.hardware [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1000.151072] env[63538]: DEBUG nova.virt.hardware [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1000.151627] env[63538]: DEBUG nova.virt.hardware [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1000.153207] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2041079-352f-4cc4-9ca0-3df1458a6471 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.163105] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc2d6f1-0401-4214-8a4c-2af3d7c81c48 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.296070] env[63538]: DEBUG nova.compute.manager [req-d06af7a2-55b7-47f5-b979-f155e51c9d9f req-a4a9f1d5-45bd-480f-a26a-2e10b853e79d service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Received event network-changed-c9561ca9-cb68-4037-807e-9f89307cb528 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1000.296783] env[63538]: DEBUG nova.compute.manager [req-d06af7a2-55b7-47f5-b979-f155e51c9d9f req-a4a9f1d5-45bd-480f-a26a-2e10b853e79d service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Refreshing instance network info cache due to event network-changed-c9561ca9-cb68-4037-807e-9f89307cb528. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1000.296783] env[63538]: DEBUG oslo_concurrency.lockutils [req-d06af7a2-55b7-47f5-b979-f155e51c9d9f req-a4a9f1d5-45bd-480f-a26a-2e10b853e79d service nova] Acquiring lock "refresh_cache-431a67e6-b90d-4930-9a86-7c49d1022ddc" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.296783] env[63538]: DEBUG oslo_concurrency.lockutils [req-d06af7a2-55b7-47f5-b979-f155e51c9d9f req-a4a9f1d5-45bd-480f-a26a-2e10b853e79d service nova] Acquired lock "refresh_cache-431a67e6-b90d-4930-9a86-7c49d1022ddc" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.297047] env[63538]: DEBUG nova.network.neutron [req-d06af7a2-55b7-47f5-b979-f155e51c9d9f req-a4a9f1d5-45bd-480f-a26a-2e10b853e79d service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Refreshing network info cache for port c9561ca9-cb68-4037-807e-9f89307cb528 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1000.404381] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "0339c969-ad97-47b1-8fab-ee595738d9df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.474217] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bc42c906-411d-4da4-83c6-99a55f56334f tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "0339c969-ad97-47b1-8fab-ee595738d9df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.260s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.474540] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "0339c969-ad97-47b1-8fab-ee595738d9df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.070s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.474789] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "0339c969-ad97-47b1-8fab-ee595738d9df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.475036] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "0339c969-ad97-47b1-8fab-ee595738d9df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.475226] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock 
"0339c969-ad97-47b1-8fab-ee595738d9df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.478228] env[63538]: INFO nova.compute.manager [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Terminating instance [ 1000.480623] env[63538]: DEBUG nova.compute.manager [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1000.480884] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1000.481849] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c88460c-e867-42e8-b0e4-62e8ddbb79fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.490509] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1000.490805] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-056a8196-0159-4c95-817f-5ee2bf060b06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.502361] env[63538]: DEBUG oslo_vmware.api [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1000.502361] env[63538]: value = "task-5101408" [ 1000.502361] env[63538]: _type = "Task" [ 1000.502361] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.515037] env[63538]: DEBUG oslo_vmware.api [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101408, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.579488] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8fb027f-3e2c-4659-bf28-c6cfc67cd543 tempest-ImagesOneServerNegativeTestJSON-620950544 tempest-ImagesOneServerNegativeTestJSON-620950544-project-member] Lock "6bc30d96-8056-421c-875b-c24488e5f595" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.911s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.593239] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101407, 'name': CreateVM_Task, 'duration_secs': 0.412736} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.593648] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1000.594216] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.594399] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.594829] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1000.595130] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d37d28e5-8b43-48e7-8376-14c8a56f6d3b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.601986] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1000.601986] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522ab491-829e-3cf8-658b-7df78396c759" [ 1000.601986] env[63538]: _type = "Task" [ 1000.601986] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.611240] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522ab491-829e-3cf8-658b-7df78396c759, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.681878] env[63538]: DEBUG nova.network.neutron [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Successfully updated port: 2bf5c751-02ce-4e9e-8e98-68c3505b8aec {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1001.016373] env[63538]: DEBUG oslo_vmware.api [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101408, 'name': PowerOffVM_Task, 'duration_secs': 0.200212} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.017081] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1001.017597] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1001.018036] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-90d5ebfe-a8c4-473f-a542-a421559b2359 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.096394] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1001.096647] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1001.096847] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleting the datastore file [datastore2] 0339c969-ad97-47b1-8fab-ee595738d9df {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.097142] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-409769aa-a13b-4aac-b438-b3dc4098cd80 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.104394] env[63538]: DEBUG oslo_vmware.api [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1001.104394] env[63538]: value = "task-5101410" [ 1001.104394] env[63538]: _type = "Task" [ 1001.104394] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.121343] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522ab491-829e-3cf8-658b-7df78396c759, 'name': SearchDatastore_Task, 'duration_secs': 0.012776} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.123649] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.123930] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1001.124228] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.124439] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.124693] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1001.126016] env[63538]: DEBUG oslo_vmware.api [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101410, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.126016] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7dbafdb7-7284-48f1-a72b-d01b08b36dff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.130280] env[63538]: DEBUG nova.network.neutron [req-d06af7a2-55b7-47f5-b979-f155e51c9d9f req-a4a9f1d5-45bd-480f-a26a-2e10b853e79d service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Updated VIF entry in instance network info cache for port c9561ca9-cb68-4037-807e-9f89307cb528. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1001.130280] env[63538]: DEBUG nova.network.neutron [req-d06af7a2-55b7-47f5-b979-f155e51c9d9f req-a4a9f1d5-45bd-480f-a26a-2e10b853e79d service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Updating instance_info_cache with network_info: [{"id": "c9561ca9-cb68-4037-807e-9f89307cb528", "address": "fa:16:3e:76:0e:f9", "network": {"id": "955bc6e2-cea0-4cf9-80fb-521ae0e565f0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-726504029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9427981aac124f6aa0c4d8d45b0ae917", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c297fe21-cd0b-4226-813b-a65d2358d034", "external-id": "nsx-vlan-transportzone-98", "segmentation_id": 98, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9561ca9-cb", "ovs_interfaceid": "c9561ca9-cb68-4037-807e-9f89307cb528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.136230] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1001.136230] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1001.136230] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a834c1c3-5cf8-4470-94ec-ea553aa1d42b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.143937] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1001.143937] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52da0dcd-380e-66ff-e041-1e42b9f51f1e" [ 1001.143937] env[63538]: _type = "Task" [ 1001.143937] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.159232] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52da0dcd-380e-66ff-e041-1e42b9f51f1e, 'name': SearchDatastore_Task, 'duration_secs': 0.01147} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.160104] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-588be416-243d-4878-bf66-a3096c9ab392 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.167707] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1001.167707] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5232f8b4-941d-4c17-9be6-acd0f89747b3" [ 1001.167707] env[63538]: _type = "Task" [ 1001.167707] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.184369] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.185932] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.187288] env[63538]: DEBUG nova.network.neutron [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1001.189348] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5232f8b4-941d-4c17-9be6-acd0f89747b3, 'name': SearchDatastore_Task, 'duration_secs': 0.010762} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.189348] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.190922] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 431a67e6-b90d-4930-9a86-7c49d1022ddc/431a67e6-b90d-4930-9a86-7c49d1022ddc.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1001.190922] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93ce4919-b658-463e-967d-2598a08abae2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.199473] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1001.199473] env[63538]: value = "task-5101411" [ 1001.199473] env[63538]: _type = "Task" [ 1001.199473] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.209640] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101411, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.410525] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c18633b-e03e-40bd-bef8-946ae519f4d8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.420963] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35430132-5a80-4c9d-a947-3086b78ad052 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.454988] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4494c745-46fb-427d-9b1a-f62fa8e16857 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.465225] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ed3e27-7c33-4451-b127-8501b2e7319b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.480640] env[63538]: DEBUG nova.compute.provider_tree [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.580056] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.624262] env[63538]: DEBUG oslo_vmware.api [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101410, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170375} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.624262] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.624616] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1001.624790] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1001.625077] env[63538]: INFO nova.compute.manager [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1001.625469] env[63538]: DEBUG oslo.service.loopingcall [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1001.625783] env[63538]: DEBUG nova.compute.manager [-] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1001.625947] env[63538]: DEBUG nova.network.neutron [-] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1001.635068] env[63538]: DEBUG oslo_concurrency.lockutils [req-d06af7a2-55b7-47f5-b979-f155e51c9d9f req-a4a9f1d5-45bd-480f-a26a-2e10b853e79d service nova] Releasing lock "refresh_cache-431a67e6-b90d-4930-9a86-7c49d1022ddc" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.710669] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101411, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.731150] env[63538]: DEBUG nova.network.neutron [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1001.881794] env[63538]: DEBUG nova.network.neutron [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance_info_cache with network_info: [{"id": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "address": "fa:16:3e:90:72:79", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bf5c751-02", "ovs_interfaceid": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.984196] env[63538]: DEBUG nova.scheduler.client.report [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1002.212082] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101411, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617773} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.212398] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 431a67e6-b90d-4930-9a86-7c49d1022ddc/431a67e6-b90d-4930-9a86-7c49d1022ddc.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1002.212630] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1002.213037] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c90dc12-4cc7-4f5b-aa21-b1a3e3f98db5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.221102] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1002.221102] env[63538]: value = "task-5101412" [ 1002.221102] env[63538]: _type = "Task" [ 1002.221102] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.230622] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101412, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.328835] env[63538]: DEBUG nova.compute.manager [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Received event network-vif-plugged-2bf5c751-02ce-4e9e-8e98-68c3505b8aec {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1002.329094] env[63538]: DEBUG oslo_concurrency.lockutils [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] Acquiring lock "3d80dc17-e330-4575-8e12-e06d8e76274a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.329323] env[63538]: DEBUG oslo_concurrency.lockutils [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.329501] env[63538]: DEBUG oslo_concurrency.lockutils [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.329682] env[63538]: DEBUG nova.compute.manager [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] No waiting events found dispatching network-vif-plugged-2bf5c751-02ce-4e9e-8e98-68c3505b8aec {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1002.329858] env[63538]: WARNING nova.compute.manager [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Received unexpected event network-vif-plugged-2bf5c751-02ce-4e9e-8e98-68c3505b8aec for instance with vm_state building and task_state spawning. [ 1002.330036] env[63538]: DEBUG nova.compute.manager [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Received event network-changed-2bf5c751-02ce-4e9e-8e98-68c3505b8aec {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1002.330266] env[63538]: DEBUG nova.compute.manager [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Refreshing instance network info cache due to event network-changed-2bf5c751-02ce-4e9e-8e98-68c3505b8aec. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1002.330373] env[63538]: DEBUG oslo_concurrency.lockutils [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] Acquiring lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.384983] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.385366] env[63538]: DEBUG nova.compute.manager [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Instance network_info: |[{"id": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "address": "fa:16:3e:90:72:79", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bf5c751-02", "ovs_interfaceid": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1002.385762] env[63538]: DEBUG oslo_concurrency.lockutils [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] Acquired lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.385964] env[63538]: DEBUG nova.network.neutron [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Refreshing network info cache for port 2bf5c751-02ce-4e9e-8e98-68c3505b8aec {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1002.387401] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:72:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31264e2-3e0a-4dfb-ba1f-6389d7d47548', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'2bf5c751-02ce-4e9e-8e98-68c3505b8aec', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1002.396894] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Creating folder: Project (1a06b7cc1ab24ba584bbe970e4fc5e81). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1002.398423] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4043692f-32b8-4825-aa54-0cb6541d5e30 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.413981] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Created folder: Project (1a06b7cc1ab24ba584bbe970e4fc5e81) in parent group-v992234. [ 1002.413981] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Creating folder: Instances. Parent ref: group-v992469. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1002.413981] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af6e7e0b-9d5e-4233-ba04-7fe529cccb4d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.426553] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Created folder: Instances in parent group-v992469. [ 1002.426830] env[63538]: DEBUG oslo.service.loopingcall [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.427064] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1002.427293] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a505c22c-5bb2-4421-8d42-6cf0ec345433 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.451041] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1002.451041] env[63538]: value = "task-5101415" [ 1002.451041] env[63538]: _type = "Task" [ 1002.451041] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.461194] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101415, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.489431] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.460s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.489970] env[63538]: DEBUG nova.compute.manager [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1002.493153] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 18.579s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.734926] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101412, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103736} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.734926] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1002.734926] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea69dfd-2786-46cf-b3b7-914a2531a129 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.766398] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 431a67e6-b90d-4930-9a86-7c49d1022ddc/431a67e6-b90d-4930-9a86-7c49d1022ddc.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1002.766398] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d054c58-d3c7-4448-bcd7-c387978b1c03 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.790158] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1002.790158] env[63538]: value = "task-5101416" [ 1002.790158] env[63538]: _type = "Task" [ 1002.790158] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.799672] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101416, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.833552] env[63538]: DEBUG nova.network.neutron [-] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.964896] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101415, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.997594] env[63538]: DEBUG nova.compute.utils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1003.013308] env[63538]: DEBUG nova.compute.manager [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1003.013615] env[63538]: DEBUG nova.network.neutron [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1003.110636] env[63538]: DEBUG nova.policy [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b2b8ae848a74fef99bcdef108bb46bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f252fb32fc84f23b6ace53cee7f877e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1003.312943] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101416, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.338122] env[63538]: INFO nova.compute.manager [-] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Took 1.71 seconds to deallocate network for instance. [ 1003.367385] env[63538]: DEBUG nova.network.neutron [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updated VIF entry in instance network info cache for port 2bf5c751-02ce-4e9e-8e98-68c3505b8aec. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1003.367516] env[63538]: DEBUG nova.network.neutron [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance_info_cache with network_info: [{"id": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "address": "fa:16:3e:90:72:79", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bf5c751-02", "ovs_interfaceid": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.412912] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37120990-52e8-4996-9adb-22684ab9f037 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.421047] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ccfadb-da6f-4f15-9077-fcda82dbf808 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.455691] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a97426b-e9fc-44d6-b1e5-dd458089e4da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.467709] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101415, 'name': CreateVM_Task, 'duration_secs': 0.757729} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.467990] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1003.469324] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f99db22-6062-49e1-bac8-8e10062a3001 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.474221] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.474437] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.474774] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1003.475082] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41de1b6b-fe93-4636-abbb-6a04f35d52e3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.480893] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1003.480893] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5229ca72-08a3-3e55-1d7b-ced4447db0d1" [ 1003.480893] env[63538]: _type = "Task" [ 1003.480893] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.489769] env[63538]: DEBUG nova.compute.provider_tree [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.500400] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5229ca72-08a3-3e55-1d7b-ced4447db0d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.510850] env[63538]: DEBUG nova.compute.manager [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1003.556677] env[63538]: DEBUG nova.network.neutron [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Successfully created port: 916b9a1d-5118-4f57-b7a9-9ca7fd8c9655 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1003.804298] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101416, 'name': ReconfigVM_Task, 'duration_secs': 0.96697} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.804626] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 431a67e6-b90d-4930-9a86-7c49d1022ddc/431a67e6-b90d-4930-9a86-7c49d1022ddc.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.805379] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ebf12088-e04f-48ec-b360-67d77f22ec09 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.813375] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1003.813375] env[63538]: value = "task-5101417" [ 1003.813375] env[63538]: _type = "Task" [ 1003.813375] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.824267] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101417, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.847685] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.870625] env[63538]: DEBUG oslo_concurrency.lockutils [req-a083e2c2-e34a-4886-941a-a60f1d1b3d7d req-06011e41-92ac-4d39-8169-3ba75ad6b75c service nova] Releasing lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.996215] env[63538]: DEBUG nova.scheduler.client.report [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1004.004698] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5229ca72-08a3-3e55-1d7b-ced4447db0d1, 'name': SearchDatastore_Task, 'duration_secs': 0.030108} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.005347] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.005643] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1004.006391] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.006391] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.006391] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.006762] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cba756a-71f0-4d6f-8f1f-8ca3ccdf49ed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.020386] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.020690] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1004.021483] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdbec994-f240-42c0-88e5-2b4d303d8604 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.029047] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1004.029047] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52addcb4-180f-14e4-3d98-4416267d5c30" [ 1004.029047] env[63538]: _type = "Task" [ 1004.029047] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.040663] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52addcb4-180f-14e4-3d98-4416267d5c30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.324979] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101417, 'name': Rename_Task, 'duration_secs': 0.26677} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.325397] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1004.325433] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-559548b8-21fa-40b8-8d9c-7d4b0f9e9439 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.335347] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1004.335347] env[63538]: value = "task-5101418" [ 1004.335347] env[63538]: _type = "Task" [ 1004.335347] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.351758] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101418, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.499449] env[63538]: DEBUG nova.compute.manager [req-c54217c0-e5eb-41f1-9c50-467f7f754c96 req-bf5d7a77-2680-415f-9d08-6e045e60be01 service nova] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Received event network-vif-deleted-dc949e07-b50e-4c8e-8b94-e1c90b211bb7 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1004.517391] env[63538]: DEBUG nova.compute.manager [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1004.541714] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52addcb4-180f-14e4-3d98-4416267d5c30, 'name': SearchDatastore_Task, 'duration_secs': 0.014522} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.544949] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3db7b109-a16e-45bf-bf94-f0d59ffd46ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.552119] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1004.552119] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f81a74-e8fc-c623-d1ad-3c103ff4cd1d" [ 1004.552119] env[63538]: _type = "Task" [ 1004.552119] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.559621] env[63538]: DEBUG nova.virt.hardware [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1004.559875] env[63538]: DEBUG nova.virt.hardware [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1004.560024] env[63538]: DEBUG nova.virt.hardware [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1004.560214] env[63538]: DEBUG nova.virt.hardware [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1004.560368] env[63538]: DEBUG nova.virt.hardware [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1004.560518] env[63538]: DEBUG nova.virt.hardware [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1004.560793] env[63538]: DEBUG nova.virt.hardware [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1004.560884] env[63538]: DEBUG nova.virt.hardware [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1004.561061] env[63538]: DEBUG 
nova.virt.hardware [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1004.561228] env[63538]: DEBUG nova.virt.hardware [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1004.561409] env[63538]: DEBUG nova.virt.hardware [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1004.562617] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d92e04-67f9-475a-9fcc-aba24ac960db {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.574244] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f81a74-e8fc-c623-d1ad-3c103ff4cd1d, 'name': SearchDatastore_Task, 'duration_secs': 0.015614} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.575560] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce112ed-5879-40e2-bdc4-f47637ea2a2c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.579926] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.580245] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 3d80dc17-e330-4575-8e12-e06d8e76274a/3d80dc17-e330-4575-8e12-e06d8e76274a.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1004.580556] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef96ec65-a5cf-49bd-971c-237c52d18ddb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.596604] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1004.596604] env[63538]: value = "task-5101419" [ 
1004.596604] env[63538]: _type = "Task" [ 1004.596604] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.605930] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101419, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.846683] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101418, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.013956] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.521s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.017535] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.291s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.019633] env[63538]: INFO nova.compute.claims [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1005.108064] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101419, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.167217] env[63538]: DEBUG nova.compute.manager [req-f509a7f5-5e8c-4490-a26b-0947a3da23f8 req-ff4eff6a-3ece-43df-a18c-eb391f2275dc service nova] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Received event network-vif-plugged-916b9a1d-5118-4f57-b7a9-9ca7fd8c9655 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1005.167502] env[63538]: DEBUG oslo_concurrency.lockutils [req-f509a7f5-5e8c-4490-a26b-0947a3da23f8 req-ff4eff6a-3ece-43df-a18c-eb391f2275dc service nova] Acquiring lock "4f81dc4e-2092-4a2c-a511-589d47d118b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.168100] env[63538]: DEBUG oslo_concurrency.lockutils [req-f509a7f5-5e8c-4490-a26b-0947a3da23f8 req-ff4eff6a-3ece-43df-a18c-eb391f2275dc service nova] Lock "4f81dc4e-2092-4a2c-a511-589d47d118b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.168100] env[63538]: DEBUG oslo_concurrency.lockutils [req-f509a7f5-5e8c-4490-a26b-0947a3da23f8 req-ff4eff6a-3ece-43df-a18c-eb391f2275dc service nova] Lock "4f81dc4e-2092-4a2c-a511-589d47d118b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.168371] env[63538]: DEBUG nova.compute.manager [req-f509a7f5-5e8c-4490-a26b-0947a3da23f8 req-ff4eff6a-3ece-43df-a18c-eb391f2275dc service nova] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] No waiting events found dispatching network-vif-plugged-916b9a1d-5118-4f57-b7a9-9ca7fd8c9655 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1005.168371] env[63538]: WARNING nova.compute.manager [req-f509a7f5-5e8c-4490-a26b-0947a3da23f8 req-ff4eff6a-3ece-43df-a18c-eb391f2275dc service nova] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Received unexpected event network-vif-plugged-916b9a1d-5118-4f57-b7a9-9ca7fd8c9655 for instance with vm_state building and task_state spawning. [ 1005.293643] env[63538]: DEBUG nova.network.neutron [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Successfully updated port: 916b9a1d-5118-4f57-b7a9-9ca7fd8c9655 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1005.347536] env[63538]: DEBUG oslo_vmware.api [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101418, 'name': PowerOnVM_Task, 'duration_secs': 0.716397} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.347536] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1005.348076] env[63538]: INFO nova.compute.manager [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Took 7.71 seconds to spawn the instance on the hypervisor. [ 1005.348076] env[63538]: DEBUG nova.compute.manager [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1005.349380] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cecb44-fd70-4284-91d0-e785749d0441 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.589369] env[63538]: INFO nova.scheduler.client.report [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted allocation for migration 798396a5-59d2-4cd0-956e-72af70feb5c4 [ 1005.608015] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101419, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600608} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.608314] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 3d80dc17-e330-4575-8e12-e06d8e76274a/3d80dc17-e330-4575-8e12-e06d8e76274a.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1005.608541] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1005.608822] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5872f5f2-e18c-407c-a0e4-a72fd2412d89 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.616905] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1005.616905] env[63538]: value = "task-5101420" [ 1005.616905] env[63538]: _type = "Task" [ 1005.616905] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.630092] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101420, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.797110] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Acquiring lock "refresh_cache-4f81dc4e-2092-4a2c-a511-589d47d118b6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.797283] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Acquired lock "refresh_cache-4f81dc4e-2092-4a2c-a511-589d47d118b6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.797444] env[63538]: DEBUG nova.network.neutron [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1005.866838] env[63538]: INFO nova.compute.manager [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Took 29.15 seconds to build instance. 
[ 1006.095375] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7e053699-6242-4849-963c-ead9e0cd3cab tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "4ec5d3a2-8b29-4074-b323-f94704043b8b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 25.691s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.128536] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101420, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127202} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.131386] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1006.132444] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f54aa11-8a93-42e2-8af2-7967e4e5adc3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.158419] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 3d80dc17-e330-4575-8e12-e06d8e76274a/3d80dc17-e330-4575-8e12-e06d8e76274a.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1006.161411] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53e21cb2-25df-4d4b-a4c7-072a943e1eeb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.183643] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1006.183643] env[63538]: value = "task-5101421" [ 1006.183643] env[63538]: _type = "Task" [ 1006.183643] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.195289] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101421, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.364171] env[63538]: DEBUG nova.network.neutron [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1006.369672] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7d0e4499-d3b6-4090-8d48-b668f9eb92ce tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.668s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.400030] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32be8f71-61f6-4711-bb85-785b081e1a33 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.408435] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55799d43-bb41-4d93-baa3-ab6d7fa1f183 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.445189] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eaba5c5-8960-4cdf-8146-2917da486372 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.455851] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24fba5e-16b1-45e3-ac8d-210dfa366ad6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.470600] env[63538]: DEBUG nova.compute.provider_tree [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.529480] env[63538]: DEBUG nova.network.neutron [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Updating instance_info_cache with network_info: [{"id": "916b9a1d-5118-4f57-b7a9-9ca7fd8c9655", "address": "fa:16:3e:b4:e9:f6", "network": {"id": "48fc2432-57aa-48c6-96ce-8df6e61e6a67", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-285072277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f252fb32fc84f23b6ace53cee7f877e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap916b9a1d-51", "ovs_interfaceid": "916b9a1d-5118-4f57-b7a9-9ca7fd8c9655", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.694221] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101421, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.973418] env[63538]: DEBUG nova.scheduler.client.report [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1007.032734] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Releasing lock "refresh_cache-4f81dc4e-2092-4a2c-a511-589d47d118b6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.033100] env[63538]: DEBUG nova.compute.manager [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Instance network_info: |[{"id": "916b9a1d-5118-4f57-b7a9-9ca7fd8c9655", "address": "fa:16:3e:b4:e9:f6", "network": {"id": "48fc2432-57aa-48c6-96ce-8df6e61e6a67", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-285072277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f252fb32fc84f23b6ace53cee7f877e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap916b9a1d-51", "ovs_interfaceid": "916b9a1d-5118-4f57-b7a9-9ca7fd8c9655", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1007.033603] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:e9:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0549820d-5649-40bc-ad6e-9ae27b384d90', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '916b9a1d-5118-4f57-b7a9-9ca7fd8c9655', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.042952] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Creating folder: Project (7f252fb32fc84f23b6ace53cee7f877e). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1007.043187] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b5387a08-41b1-4510-bbbd-a4eeb75a7fd8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.056782] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Created folder: Project (7f252fb32fc84f23b6ace53cee7f877e) in parent group-v992234. [ 1007.056782] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Creating folder: Instances. Parent ref: group-v992472. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1007.056985] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8717cded-e0bc-447b-a0a0-f66d6b1066fc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.067350] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Created folder: Instances in parent group-v992472. [ 1007.067608] env[63538]: DEBUG oslo.service.loopingcall [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1007.067817] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1007.068067] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f28a431-eb79-46a8-91d2-3319d99ba543 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.093157] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1007.093157] env[63538]: value = "task-5101424" [ 1007.093157] env[63538]: _type = "Task" [ 1007.093157] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.101563] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101424, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.196628] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101421, 'name': ReconfigVM_Task, 'duration_secs': 0.97689} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.196936] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Reconfigured VM instance instance-00000059 to attach disk [datastore2] 3d80dc17-e330-4575-8e12-e06d8e76274a/3d80dc17-e330-4575-8e12-e06d8e76274a.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1007.197613] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7622a77-4a47-4abb-8ecf-e6017ca34b9c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.201539] env[63538]: DEBUG nova.compute.manager [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Received event network-changed-916b9a1d-5118-4f57-b7a9-9ca7fd8c9655 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1007.201727] env[63538]: DEBUG nova.compute.manager [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Refreshing instance network info cache due to event network-changed-916b9a1d-5118-4f57-b7a9-9ca7fd8c9655. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1007.201953] env[63538]: DEBUG oslo_concurrency.lockutils [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] Acquiring lock "refresh_cache-4f81dc4e-2092-4a2c-a511-589d47d118b6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.202117] env[63538]: DEBUG oslo_concurrency.lockutils [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] Acquired lock "refresh_cache-4f81dc4e-2092-4a2c-a511-589d47d118b6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.202285] env[63538]: DEBUG nova.network.neutron [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Refreshing network info cache for port 916b9a1d-5118-4f57-b7a9-9ca7fd8c9655 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1007.209762] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1007.209762] env[63538]: value = "task-5101425" [ 1007.209762] env[63538]: _type = "Task" [ 1007.209762] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.221670] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101425, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.478982] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.479719] env[63538]: DEBUG nova.compute.manager [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1007.483147] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.621s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.485420] env[63538]: INFO nova.compute.claims [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1007.594072] env[63538]: DEBUG oslo_concurrency.lockutils [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "4ec5d3a2-8b29-4074-b323-f94704043b8b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.594072] env[63538]: DEBUG oslo_concurrency.lockutils [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "4ec5d3a2-8b29-4074-b323-f94704043b8b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.594268] env[63538]: DEBUG oslo_concurrency.lockutils [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "4ec5d3a2-8b29-4074-b323-f94704043b8b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.594302] env[63538]: DEBUG oslo_concurrency.lockutils [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 
tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "4ec5d3a2-8b29-4074-b323-f94704043b8b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.594475] env[63538]: DEBUG oslo_concurrency.lockutils [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "4ec5d3a2-8b29-4074-b323-f94704043b8b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.602563] env[63538]: INFO nova.compute.manager [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Terminating instance [ 1007.606249] env[63538]: DEBUG nova.compute.manager [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1007.606249] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1007.606469] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5a29f8-33b9-45ee-93d6-28c930fa9c4b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.612656] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101424, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.618138] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1007.618428] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6e2292a-47f7-4d99-a893-45a2fc02415f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.625438] env[63538]: DEBUG oslo_vmware.api [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1007.625438] env[63538]: value = "task-5101426" [ 1007.625438] env[63538]: _type = "Task" [ 1007.625438] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.635755] env[63538]: DEBUG oslo_vmware.api [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101426, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.724352] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101425, 'name': Rename_Task, 'duration_secs': 0.248166} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.724962] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1007.725477] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3dcb00b3-8287-4ce8-85fb-d92cd02a0238 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.735281] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1007.735281] env[63538]: value = "task-5101427" [ 1007.735281] env[63538]: _type = "Task" [ 1007.735281] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.746067] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101427, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.991029] env[63538]: DEBUG nova.compute.utils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1007.994486] env[63538]: DEBUG nova.compute.manager [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1007.994654] env[63538]: DEBUG nova.network.neutron [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1008.032716] env[63538]: DEBUG nova.network.neutron [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Updated VIF entry in instance network info cache for port 916b9a1d-5118-4f57-b7a9-9ca7fd8c9655. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1008.033374] env[63538]: DEBUG nova.network.neutron [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Updating instance_info_cache with network_info: [{"id": "916b9a1d-5118-4f57-b7a9-9ca7fd8c9655", "address": "fa:16:3e:b4:e9:f6", "network": {"id": "48fc2432-57aa-48c6-96ce-8df6e61e6a67", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-285072277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f252fb32fc84f23b6ace53cee7f877e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap916b9a1d-51", "ovs_interfaceid": "916b9a1d-5118-4f57-b7a9-9ca7fd8c9655", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.079854] env[63538]: DEBUG nova.policy [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10be6fb1393942458bb8d0c2f7dd301f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e0c099142ba469f8d4e5cf2fa56d03a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1008.108059] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101424, 'name': CreateVM_Task, 'duration_secs': 0.654711} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.108329] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1008.109244] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.109502] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.109983] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1008.110403] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb2d240b-612c-4ca7-aa2b-7e7693d22cf2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.116364] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Waiting for the task: (returnval){ [ 1008.116364] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522311f5-dbac-579b-0aac-7f5ca8282548" [ 1008.116364] env[63538]: _type = "Task" [ 1008.116364] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.126892] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522311f5-dbac-579b-0aac-7f5ca8282548, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.135908] env[63538]: DEBUG oslo_vmware.api [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101426, 'name': PowerOffVM_Task, 'duration_secs': 0.207239} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.137893] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1008.138131] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1008.140836] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10ec47c0-5507-427c-bc95-d13dc6e62a43 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.143215] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.143451] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.214027] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1008.214027] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1008.214027] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleting the datastore file [datastore1] 4ec5d3a2-8b29-4074-b323-f94704043b8b {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.214027] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-900c6bf3-87f9-4b9c-91ab-6acc111f3ec6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.222641] env[63538]: DEBUG oslo_vmware.api [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 
tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1008.222641] env[63538]: value = "task-5101429" [ 1008.222641] env[63538]: _type = "Task" [ 1008.222641] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.231010] env[63538]: DEBUG oslo_vmware.api [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101429, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.246893] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101427, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.385719] env[63538]: DEBUG nova.network.neutron [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Successfully created port: 7bb10fc8-2099-48a7-bad4-846efd238d94 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1008.495753] env[63538]: DEBUG nova.compute.manager [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1008.535572] env[63538]: DEBUG oslo_concurrency.lockutils [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] Releasing lock "refresh_cache-4f81dc4e-2092-4a2c-a511-589d47d118b6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.535858] env[63538]: DEBUG nova.compute.manager [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Received event network-changed-c9561ca9-cb68-4037-807e-9f89307cb528 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1008.536057] env[63538]: DEBUG nova.compute.manager [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Refreshing instance network info cache due to event network-changed-c9561ca9-cb68-4037-807e-9f89307cb528. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1008.536333] env[63538]: DEBUG oslo_concurrency.lockutils [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] Acquiring lock "refresh_cache-431a67e6-b90d-4930-9a86-7c49d1022ddc" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.536425] env[63538]: DEBUG oslo_concurrency.lockutils [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] Acquired lock "refresh_cache-431a67e6-b90d-4930-9a86-7c49d1022ddc" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.536592] env[63538]: DEBUG nova.network.neutron [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Refreshing network info cache for port c9561ca9-cb68-4037-807e-9f89307cb528 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1008.632153] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522311f5-dbac-579b-0aac-7f5ca8282548, 'name': SearchDatastore_Task, 'duration_secs': 0.01229} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.632153] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.632275] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1008.632510] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.633348] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.633348] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1008.634491] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ad0fab0-1bbb-4b76-9545-f87016e6e797 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.646161] env[63538]: DEBUG nova.compute.manager [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1008.650965] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1008.650965] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1008.651923] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5896537-9d72-421f-b195-6085d56a4b54 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.666705] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Waiting for the task: (returnval){ [ 1008.666705] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5234f5c7-243d-2b90-7575-5873240d5abc" [ 1008.666705] env[63538]: _type = "Task" [ 1008.666705] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.678148] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5234f5c7-243d-2b90-7575-5873240d5abc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.732649] env[63538]: DEBUG oslo_vmware.api [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101429, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.282131} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.735618] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.735806] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1008.736164] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1008.736251] env[63538]: INFO nova.compute.manager [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1008.736445] env[63538]: DEBUG oslo.service.loopingcall [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1008.736946] env[63538]: DEBUG nova.compute.manager [-] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1008.737110] env[63538]: DEBUG nova.network.neutron [-] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1008.748454] env[63538]: DEBUG oslo_vmware.api [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101427, 'name': PowerOnVM_Task, 'duration_secs': 0.639701} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.751847] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1008.752096] env[63538]: INFO nova.compute.manager [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Took 8.63 seconds to spawn the instance on the hypervisor. 
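The PowerOnVM_Task / "progress is N%" entries above come from oslo.vmware's task-polling helper. A minimal, illustrative sketch of that pattern (not Nova's code; host, credentials, and the vm_ref are placeholders, and the constructor keywords are assumed from oslo.vmware's public VMwareAPISession interface):

```python
# Sketch only: the invoke_api/wait_for_task pattern behind the
# "Invoking VirtualMachine.PowerOnVM_Task" and
# "Task: {'id': task-...} progress is N%" log lines.
from oslo_vmware import api

# Placeholder connection values; Nova reads the real ones from nova.conf [vmware].
session = api.VMwareAPISession(
    'vc1.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def power_on(vm_ref):
    # Start the vCenter task, then block while oslo.vmware polls its
    # progress (the DEBUG "_poll_task ... progress is X%" entries).
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task_ref)
```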
[ 1008.752282] env[63538]: DEBUG nova.compute.manager [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1008.753421] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6795cc76-e4d8-4df6-91f3-f36f175d9d6c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.894272] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4da1d6-0bfd-477b-88f2-3bca1faf44ad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.903600] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d77644f-95e9-4c82-adcb-fcf42b7f18f1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.938040] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d056f914-8417-4fdf-b63a-0ac31b8846d8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.946633] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e80c6bf-cb04-40a4-bf5b-e5991b4414bd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.968143] env[63538]: DEBUG nova.compute.provider_tree [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.184628] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5234f5c7-243d-2b90-7575-5873240d5abc, 'name': SearchDatastore_Task, 'duration_secs': 0.040236} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.188867] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.190750] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-332c70e4-04cd-4cb6-bfb5-f3cae0e0c8f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.198966] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Waiting for the task: (returnval){ [ 1009.198966] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d41682-0e2c-a60b-c714-62e9c3316faf" [ 1009.198966] env[63538]: _type = "Task" [ 1009.198966] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.214425] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d41682-0e2c-a60b-c714-62e9c3316faf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.249627] env[63538]: DEBUG nova.compute.manager [req-49462869-fc4d-4b9b-b5f9-f988d3f3c04a req-c80925aa-b020-4f61-8dce-185d778354c9 service nova] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Received event network-vif-deleted-31cf3b33-b97d-4183-a21c-80e24e27351f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1009.249840] env[63538]: INFO nova.compute.manager [req-49462869-fc4d-4b9b-b5f9-f988d3f3c04a req-c80925aa-b020-4f61-8dce-185d778354c9 service nova] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Neutron deleted interface 31cf3b33-b97d-4183-a21c-80e24e27351f; detaching it from the instance and deleting it from the info cache [ 1009.250035] env[63538]: DEBUG nova.network.neutron [req-49462869-fc4d-4b9b-b5f9-f988d3f3c04a req-c80925aa-b020-4f61-8dce-185d778354c9 service nova] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.273182] env[63538]: INFO nova.compute.manager [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Took 30.47 seconds to build instance. 
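The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" entries throughout this section are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of the pattern, assuming only the public synchronized decorator and lock context manager (the names used here are placeholders, not Nova's):

```python
# Sketch only: how the lockutils messages in the log are produced.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources():
    # Runs only while the named lock is held; the timings in the log
    # ("waited 0.000s", "held 2.494s") bracket this call.
    pass

# The same helper works as a context manager for ad-hoc names,
# e.g. per-instance event locks such as "<instance-uuid>-events".
with lockutils.lock('example-instance-uuid-events'):
    pass
```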
[ 1009.471653] env[63538]: DEBUG nova.scheduler.client.report [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1009.507523] env[63538]: DEBUG nova.compute.manager [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1009.538899] env[63538]: DEBUG nova.virt.hardware [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1009.539225] env[63538]: DEBUG nova.virt.hardware [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1009.539394] env[63538]: DEBUG nova.virt.hardware [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1009.539587] env[63538]: DEBUG nova.virt.hardware [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1009.539741] env[63538]: DEBUG nova.virt.hardware [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1009.539909] env[63538]: DEBUG nova.virt.hardware [None req-648a49c4-4077-4327-a531-b19849f13708 
tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1009.540163] env[63538]: DEBUG nova.virt.hardware [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1009.540453] env[63538]: DEBUG nova.virt.hardware [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1009.540730] env[63538]: DEBUG nova.virt.hardware [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1009.541139] env[63538]: DEBUG nova.virt.hardware [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1009.541360] env[63538]: DEBUG nova.virt.hardware [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1009.541810] env[63538]: DEBUG nova.network.neutron [-] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.545070] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90e93c8-6908-4502-94f5-1d0857cfae8e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.555763] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732a3953-a986-4470-b588-7f7184354219 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.715064] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d41682-0e2c-a60b-c714-62e9c3316faf, 'name': SearchDatastore_Task, 'duration_secs': 0.017293} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.715421] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.715747] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 4f81dc4e-2092-4a2c-a511-589d47d118b6/4f81dc4e-2092-4a2c-a511-589d47d118b6.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1009.716060] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e17bd87-eff4-4d00-888d-f0ef1f6e1629 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.727993] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Waiting for the task: (returnval){ [ 1009.727993] env[63538]: value = "task-5101430" [ 1009.727993] env[63538]: _type = "Task" [ 1009.727993] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.745182] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101430, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.754853] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd9b0b3f-6c18-4422-a33f-841180ec7138 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.757576] env[63538]: DEBUG nova.network.neutron [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Updated VIF entry in instance network info cache for port c9561ca9-cb68-4037-807e-9f89307cb528. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1009.758769] env[63538]: DEBUG nova.network.neutron [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Updating instance_info_cache with network_info: [{"id": "c9561ca9-cb68-4037-807e-9f89307cb528", "address": "fa:16:3e:76:0e:f9", "network": {"id": "955bc6e2-cea0-4cf9-80fb-521ae0e565f0", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-726504029-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9427981aac124f6aa0c4d8d45b0ae917", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c297fe21-cd0b-4226-813b-a65d2358d034", "external-id": "nsx-vlan-transportzone-98", "segmentation_id": 98, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9561ca9-cb", "ovs_interfaceid": "c9561ca9-cb68-4037-807e-9f89307cb528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.763277] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d825c52c-92d5-4a95-b30f-2256d93d525c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.775196] env[63538]: DEBUG oslo_concurrency.lockutils [None req-96abd2fe-d5c9-47e3-8cf3-ea2149f2df1a tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.978s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.807555] env[63538]: DEBUG nova.compute.manager [req-49462869-fc4d-4b9b-b5f9-f988d3f3c04a req-c80925aa-b020-4f61-8dce-185d778354c9 service nova] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Detach interface failed, port_id=31cf3b33-b97d-4183-a21c-80e24e27351f, reason: Instance 4ec5d3a2-8b29-4074-b323-f94704043b8b could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1009.978032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.978032] env[63538]: DEBUG nova.compute.manager [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1009.980812] env[63538]: DEBUG oslo_concurrency.lockutils [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.617s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.980915] env[63538]: DEBUG nova.objects.instance [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lazy-loading 'resources' on Instance uuid e79a9eeb-a4c4-4613-bc43-4e40103addf9 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1009.992792] env[63538]: DEBUG nova.compute.manager [req-471596f9-96cc-4300-b748-6c30a8ed4ee8 req-1cf4ae76-a5ba-425a-a9a3-e5f434b7e399 service nova] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Received event network-vif-plugged-7bb10fc8-2099-48a7-bad4-846efd238d94 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1009.993598] env[63538]: DEBUG oslo_concurrency.lockutils [req-471596f9-96cc-4300-b748-6c30a8ed4ee8 req-1cf4ae76-a5ba-425a-a9a3-e5f434b7e399 service nova] Acquiring lock "144df97e-f47b-4ead-8243-345d98b9f3e6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.993911] env[63538]: DEBUG oslo_concurrency.lockutils [req-471596f9-96cc-4300-b748-6c30a8ed4ee8 req-1cf4ae76-a5ba-425a-a9a3-e5f434b7e399 service nova] Lock "144df97e-f47b-4ead-8243-345d98b9f3e6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.994650] env[63538]: DEBUG oslo_concurrency.lockutils [req-471596f9-96cc-4300-b748-6c30a8ed4ee8 req-1cf4ae76-a5ba-425a-a9a3-e5f434b7e399 service nova] Lock "144df97e-f47b-4ead-8243-345d98b9f3e6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.994650] env[63538]: DEBUG nova.compute.manager [req-471596f9-96cc-4300-b748-6c30a8ed4ee8 req-1cf4ae76-a5ba-425a-a9a3-e5f434b7e399 service nova] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] No waiting events found dispatching 
network-vif-plugged-7bb10fc8-2099-48a7-bad4-846efd238d94 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1009.994877] env[63538]: WARNING nova.compute.manager [req-471596f9-96cc-4300-b748-6c30a8ed4ee8 req-1cf4ae76-a5ba-425a-a9a3-e5f434b7e399 service nova] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Received unexpected event network-vif-plugged-7bb10fc8-2099-48a7-bad4-846efd238d94 for instance with vm_state building and task_state spawning. [ 1010.049739] env[63538]: INFO nova.compute.manager [-] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Took 1.31 seconds to deallocate network for instance. [ 1010.153782] env[63538]: DEBUG nova.network.neutron [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Successfully updated port: 7bb10fc8-2099-48a7-bad4-846efd238d94 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1010.246905] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101430, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.261089] env[63538]: DEBUG oslo_concurrency.lockutils [req-c093b02f-6f91-42b4-9d39-136dfa62f50b req-177c9212-1f72-4ed6-a8da-381b137d90ad service nova] Releasing lock "refresh_cache-431a67e6-b90d-4930-9a86-7c49d1022ddc" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.492022] env[63538]: DEBUG nova.compute.utils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1010.498248] env[63538]: DEBUG nova.compute.manager [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1010.498248] env[63538]: DEBUG nova.network.neutron [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1010.537856] env[63538]: DEBUG nova.policy [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21f98d3c77454d809c0aa1ca5c7dc6d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '492427e54e1048f292dab2abdac71af5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1010.559713] env[63538]: DEBUG oslo_concurrency.lockutils [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.655842] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Acquiring lock "refresh_cache-144df97e-f47b-4ead-8243-345d98b9f3e6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.655928] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Acquired lock "refresh_cache-144df97e-f47b-4ead-8243-345d98b9f3e6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.656096] env[63538]: DEBUG nova.network.neutron [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1010.746838] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101430, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561525} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.747035] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 4f81dc4e-2092-4a2c-a511-589d47d118b6/4f81dc4e-2092-4a2c-a511-589d47d118b6.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1010.748190] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1010.748190] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df45af87-f383-476e-a088-4ad9c4906de5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.755809] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Waiting for the task: (returnval){ [ 1010.755809] env[63538]: value = "task-5101431" [ 1010.755809] env[63538]: _type = "Task" [ 1010.755809] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.767436] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101431, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.817828] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8df071-972d-4600-a9e4-c45ce034e6e2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.826058] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6f697a-c068-4b05-8651-b395bc0d162c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.830542] env[63538]: DEBUG nova.network.neutron [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Successfully created port: bcfdefa5-ccee-41b2-8eeb-88554605c6e3 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1010.861598] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b867cd-f390-4eb9-9d1e-096f72eb80c5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.869974] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404cb8ee-2b28-4b0d-8e74-0d5a8eb163dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.885254] env[63538]: DEBUG nova.compute.provider_tree [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1010.999128] env[63538]: DEBUG nova.compute.manager [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1011.202527] env[63538]: DEBUG nova.network.neutron [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1011.265471] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101431, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077247} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.265829] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1011.266919] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fdf45e7-4927-4488-982c-ead681e320ad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.289669] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 4f81dc4e-2092-4a2c-a511-589d47d118b6/4f81dc4e-2092-4a2c-a511-589d47d118b6.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1011.295663] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e8f74ff-76ea-4bbc-9efb-987d12f794a8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.312095] env[63538]: DEBUG nova.compute.manager [req-0417fcfb-9343-4b58-a3f7-52d89d7f497f req-e7e2a9f5-9d81-4cb7-a877-66c37e8456f8 service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Received event network-changed-2bf5c751-02ce-4e9e-8e98-68c3505b8aec {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1011.312517] env[63538]: DEBUG nova.compute.manager [req-0417fcfb-9343-4b58-a3f7-52d89d7f497f req-e7e2a9f5-9d81-4cb7-a877-66c37e8456f8 service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Refreshing instance network info cache due to event network-changed-2bf5c751-02ce-4e9e-8e98-68c3505b8aec. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1011.312877] env[63538]: DEBUG oslo_concurrency.lockutils [req-0417fcfb-9343-4b58-a3f7-52d89d7f497f req-e7e2a9f5-9d81-4cb7-a877-66c37e8456f8 service nova] Acquiring lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.313158] env[63538]: DEBUG oslo_concurrency.lockutils [req-0417fcfb-9343-4b58-a3f7-52d89d7f497f req-e7e2a9f5-9d81-4cb7-a877-66c37e8456f8 service nova] Acquired lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.313458] env[63538]: DEBUG nova.network.neutron [req-0417fcfb-9343-4b58-a3f7-52d89d7f497f req-e7e2a9f5-9d81-4cb7-a877-66c37e8456f8 service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Refreshing network info cache for port 2bf5c751-02ce-4e9e-8e98-68c3505b8aec {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1011.325407] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Waiting for the task: (returnval){ [ 1011.325407] env[63538]: value = "task-5101432" [ 1011.325407] env[63538]: _type = "Task" [ 1011.325407] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.341920] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101432, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.393204] env[63538]: DEBUG nova.scheduler.client.report [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1011.397966] env[63538]: DEBUG nova.network.neutron [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Updating instance_info_cache with network_info: [{"id": "7bb10fc8-2099-48a7-bad4-846efd238d94", "address": "fa:16:3e:47:4a:58", "network": {"id": "9b47978c-2396-444f-8206-c5391fc33cd8", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-784107468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e0c099142ba469f8d4e5cf2fa56d03a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a3f99df-d1bc-4a37-a048-263445d4a7b0", "external-id": "nsx-vlan-transportzone-374", "segmentation_id": 374, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bb10fc8-20", "ovs_interfaceid": "7bb10fc8-2099-48a7-bad4-846efd238d94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.832025] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.832025] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.840759] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': 
task-5101432, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.898962] env[63538]: DEBUG oslo_concurrency.lockutils [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.918s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.902355] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.088s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.902729] env[63538]: DEBUG nova.objects.instance [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lazy-loading 'resources' on Instance uuid f5d92749-04d6-4935-8dc6-afb692222df0 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.904875] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Releasing lock "refresh_cache-144df97e-f47b-4ead-8243-345d98b9f3e6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.905518] env[63538]: DEBUG nova.compute.manager [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Instance network_info: |[{"id": "7bb10fc8-2099-48a7-bad4-846efd238d94", "address": "fa:16:3e:47:4a:58", "network": {"id": "9b47978c-2396-444f-8206-c5391fc33cd8", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-784107468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e0c099142ba469f8d4e5cf2fa56d03a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a3f99df-d1bc-4a37-a048-263445d4a7b0", "external-id": "nsx-vlan-transportzone-374", "segmentation_id": 374, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bb10fc8-20", "ovs_interfaceid": "7bb10fc8-2099-48a7-bad4-846efd238d94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1011.905892] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:4a:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a3f99df-d1bc-4a37-a048-263445d4a7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7bb10fc8-2099-48a7-bad4-846efd238d94', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1011.914699] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Creating folder: Project (6e0c099142ba469f8d4e5cf2fa56d03a). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1011.915933] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-446af132-b947-4e93-a093-9f2512d237e3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.924968] env[63538]: INFO nova.scheduler.client.report [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Deleted allocations for instance e79a9eeb-a4c4-4613-bc43-4e40103addf9 [ 1011.934054] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Created folder: Project (6e0c099142ba469f8d4e5cf2fa56d03a) in parent group-v992234. [ 1011.934054] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Creating folder: Instances. Parent ref: group-v992475. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1011.934255] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ee8bb30-250f-466a-8bc4-10a61b1c272f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.946173] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Created folder: Instances in parent group-v992475. [ 1011.946437] env[63538]: DEBUG oslo.service.loopingcall [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.946711] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1011.946925] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae710ece-7786-49c0-84bb-a61ea1724a40 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.972197] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1011.972197] env[63538]: value = "task-5101435" [ 1011.972197] env[63538]: _type = "Task" [ 1011.972197] env[63538]: } to complete. 
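[editor's note] The "Instance VIF info" list above is derived from the Neutron network_info entry logged earlier for the same port (matching port id, MAC and NSX logical-switch id). A small self-contained sketch of that translation; the function name and the assumption that every port maps to an OpaqueNetwork/vmxnet3 entry are illustrative simplifications, not the driver's full vif-model selection.

    def neutron_vif_to_vmware_vif_info(vif):
        """Map one Neutron VIF dict (as logged in instance_info_cache) to the
        flat structure the VMware driver logs as 'Instance VIF info'."""
        return {
            'network_name': vif['network']['bridge'],          # e.g. 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',   # assumed default for this image/flavor
        }

    # Values taken from the network_info logged above for 144df97e-...:
    example_vif = {
        'id': '7bb10fc8-2099-48a7-bad4-846efd238d94',
        'address': 'fa:16:3e:47:4a:58',
        'network': {'bridge': 'br-int'},
        'details': {'nsx-logical-switch-id': '0a3f99df-d1bc-4a37-a048-263445d4a7b0'},
    }
    print([neutron_vif_to_vmware_vif_info(example_vif)])
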
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.986313] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101435, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.009585] env[63538]: DEBUG nova.compute.manager [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1012.024930] env[63538]: DEBUG nova.compute.manager [req-75aad3e6-0aec-4749-8507-76846cea477c req-ac30b9d8-b52f-4a27-9c0b-5374dcdb2644 service nova] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Received event network-changed-7bb10fc8-2099-48a7-bad4-846efd238d94 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1012.024930] env[63538]: DEBUG nova.compute.manager [req-75aad3e6-0aec-4749-8507-76846cea477c req-ac30b9d8-b52f-4a27-9c0b-5374dcdb2644 service nova] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Refreshing instance network info cache due to event network-changed-7bb10fc8-2099-48a7-bad4-846efd238d94. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1012.024930] env[63538]: DEBUG oslo_concurrency.lockutils [req-75aad3e6-0aec-4749-8507-76846cea477c req-ac30b9d8-b52f-4a27-9c0b-5374dcdb2644 service nova] Acquiring lock "refresh_cache-144df97e-f47b-4ead-8243-345d98b9f3e6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.024930] env[63538]: DEBUG oslo_concurrency.lockutils [req-75aad3e6-0aec-4749-8507-76846cea477c req-ac30b9d8-b52f-4a27-9c0b-5374dcdb2644 service nova] Acquired lock "refresh_cache-144df97e-f47b-4ead-8243-345d98b9f3e6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.024930] env[63538]: DEBUG nova.network.neutron [req-75aad3e6-0aec-4749-8507-76846cea477c req-ac30b9d8-b52f-4a27-9c0b-5374dcdb2644 service nova] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Refreshing network info cache for port 7bb10fc8-2099-48a7-bad4-846efd238d94 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1012.044461] env[63538]: DEBUG nova.virt.hardware [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 
1012.044795] env[63538]: DEBUG nova.virt.hardware [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1012.044967] env[63538]: DEBUG nova.virt.hardware [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1012.045193] env[63538]: DEBUG nova.virt.hardware [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1012.045347] env[63538]: DEBUG nova.virt.hardware [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1012.045621] env[63538]: DEBUG nova.virt.hardware [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1012.045842] env[63538]: DEBUG nova.virt.hardware [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1012.046056] env[63538]: DEBUG nova.virt.hardware [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1012.046268] env[63538]: DEBUG nova.virt.hardware [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1012.046462] env[63538]: DEBUG nova.virt.hardware [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1012.046681] env[63538]: DEBUG nova.virt.hardware [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1012.048408] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175deb76-601c-4507-80f2-c4bb96f1d76b {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.064806] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec48ce6-71ac-44eb-8309-a34c12e38e14 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.189626] env[63538]: DEBUG nova.network.neutron [req-0417fcfb-9343-4b58-a3f7-52d89d7f497f req-e7e2a9f5-9d81-4cb7-a877-66c37e8456f8 service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updated VIF entry in instance network info cache for port 2bf5c751-02ce-4e9e-8e98-68c3505b8aec. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1012.190023] env[63538]: DEBUG nova.network.neutron [req-0417fcfb-9343-4b58-a3f7-52d89d7f497f req-e7e2a9f5-9d81-4cb7-a877-66c37e8456f8 service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance_info_cache with network_info: [{"id": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "address": "fa:16:3e:90:72:79", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bf5c751-02", "ovs_interfaceid": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.338026] env[63538]: INFO nova.compute.manager [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Detaching volume 29ea8eee-32fd-404e-9a51-9211e8b1a496 [ 1012.345941] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101432, 'name': ReconfigVM_Task, 'duration_secs': 0.743508} completed successfully. 
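[editor's note] The refreshed cache entry above for port 2bf5c751-... carries both a fixed address and an associated floating IP nested inside the subnet's "ips" list. A short sketch of walking that structure; purely illustrative and assuming the nested layout shown in the log.

    def collect_addresses(network_info):
        """Return (fixed, floating) address lists from a network_info list
        shaped like the instance_info_cache entries in the log."""
        fixed, floating = [], []
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    fixed.append(ip['address'])
                    floating.extend(f['address'] for f in ip.get('floating_ips', []))
        return fixed, floating

    # Minimal data matching the refreshed entry above:
    cache_entry = [{
        'network': {'subnets': [{'ips': [{'address': '192.168.128.6',
                                          'floating_ips': [{'address': '10.180.180.169'}]}]}]},
    }]
    print(collect_addresses(cache_entry))   # (['192.168.128.6'], ['10.180.180.169'])
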
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.348839] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 4f81dc4e-2092-4a2c-a511-589d47d118b6/4f81dc4e-2092-4a2c-a511-589d47d118b6.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1012.350957] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a59e91f0-95f9-44ea-8282-c7000a2274f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.357422] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Waiting for the task: (returnval){ [ 1012.357422] env[63538]: value = "task-5101436" [ 1012.357422] env[63538]: _type = "Task" [ 1012.357422] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.369576] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101436, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.394795] env[63538]: INFO nova.virt.block_device [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Attempting to driver detach volume 29ea8eee-32fd-404e-9a51-9211e8b1a496 from mountpoint /dev/sdb [ 1012.395138] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Volume detach. 
Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1012.395405] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992456', 'volume_id': '29ea8eee-32fd-404e-9a51-9211e8b1a496', 'name': 'volume-29ea8eee-32fd-404e-9a51-9211e8b1a496', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'edcc5700-7b1e-494a-82d1-844373a9d5a6', 'attached_at': '', 'detached_at': '', 'volume_id': '29ea8eee-32fd-404e-9a51-9211e8b1a496', 'serial': '29ea8eee-32fd-404e-9a51-9211e8b1a496'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1012.396559] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1c98c1-b205-437b-84a7-66871d834731 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.422963] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe657c15-e7d3-45b1-ac24-2349cd1512d3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.431224] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523e4061-fad7-4f21-8527-8b6260aac752 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.437408] env[63538]: DEBUG oslo_concurrency.lockutils [None req-86703d10-9d74-4df0-a8d0-2890b3f29260 tempest-SecurityGroupsTestJSON-2131609758 tempest-SecurityGroupsTestJSON-2131609758-project-member] Lock "e79a9eeb-a4c4-4613-bc43-4e40103addf9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.372s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.465250] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca593834-811c-4584-848a-3f3fed13d487 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.488505] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] The volume has not been displaced from its original location: [datastore2] volume-29ea8eee-32fd-404e-9a51-9211e8b1a496/volume-29ea8eee-32fd-404e-9a51-9211e8b1a496.vmdk. No consolidation needed. 
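[editor's note] Before detaching, the driver checks whether the volume's backing file is still at its original datastore path; since [datastore2] volume-29ea.../volume-29ea....vmdk is unchanged, it reports that no consolidation is needed. A toy sketch of that decision; the helper name and the plain string comparison are simplifying assumptions (the real check inspects the device backing reported by vCenter).

    def needs_consolidation(original_path, current_backing_path):
        """True when the volume vmdk has been relinked or displaced by the VM
        (for example after a rescue or migration) and must be moved back."""
        return original_path != current_backing_path

    volume_id = '29ea8eee-32fd-404e-9a51-9211e8b1a496'
    original = '[datastore2] volume-%s/volume-%s.vmdk' % (volume_id, volume_id)
    current = original   # what the log reports: not displaced
    if not needs_consolidation(original, current):
        print('No consolidation needed for %s' % volume_id)
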
{{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1012.498383] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Reconfiguring VM instance instance-00000048 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1012.509126] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46b31614-2f9d-429f-bf1b-21e994fdd1cf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.525632] env[63538]: DEBUG nova.network.neutron [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Successfully updated port: bcfdefa5-ccee-41b2-8eeb-88554605c6e3 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1012.537264] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101435, 'name': CreateVM_Task, 'duration_secs': 0.371644} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.538459] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1012.539144] env[63538]: DEBUG oslo_vmware.api [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 1012.539144] env[63538]: value = "task-5101437" [ 1012.539144] env[63538]: _type = "Task" [ 1012.539144] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.539554] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.539787] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.540240] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1012.540954] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dfc6818-465f-4d3c-93fe-d2e47c021293 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.553565] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Waiting for the task: (returnval){ [ 1012.553565] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c81a21-bc50-cbcf-4bee-7d434dbbe555" [ 1012.553565] env[63538]: _type = "Task" [ 1012.553565] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.558226] env[63538]: DEBUG oslo_vmware.api [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101437, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.577160] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c81a21-bc50-cbcf-4bee-7d434dbbe555, 'name': SearchDatastore_Task, 'duration_secs': 0.017418} completed successfully. 
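[editor's note] The lock names above ("[datastore1] devstack-image-cache_base/<image-id>" and the .../<image-id>.vmdk variant) serialize concurrent spawns that want the same cached image. A compact sketch of deriving those names and holding the lock around a fetch-if-missing step, assuming oslo.concurrency is available; ensure_image_cached, exists_fn and fetch_fn are hypothetical callables, not the driver's real fetch path.

    from oslo_concurrency import lockutils

    def cached_vmdk_path(cache_folder, image_id):
        # e.g. '[datastore1] devstack-image-cache_base/<id>/<id>.vmdk'
        return '%s/%s/%s.vmdk' % (cache_folder, image_id, image_id)

    def ensure_image_cached(cache_folder, image_id, exists_fn, fetch_fn):
        """Serialize on a per-image lock so only one request downloads the
        image into the datastore cache; others wait, then reuse it."""
        path = cached_vmdk_path(cache_folder, image_id)
        with lockutils.lock('%s/%s' % (cache_folder, image_id)):
            if not exists_fn(path):
                fetch_fn(image_id, path)
        return path

    # Stubbed usage mirroring the image id and cache folder in the log:
    print(ensure_image_cached('[datastore1] devstack-image-cache_base',
                              'faabbca4-e27b-433a-b93d-f059fd73bc92',
                              exists_fn=lambda p: True,
                              fetch_fn=lambda i, p: None))
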
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.577460] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.577709] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1012.577956] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.578118] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.578304] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1012.578578] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3881f75-3ca2-4def-a5fb-28835bd1d712 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.597425] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1012.597597] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Folder [datastore1] devstack-image-cache_base created. 
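[editor's note] The MakeDirectory call above appears to be treated as create-if-missing: when devstack-image-cache_base already exists the driver simply proceeds, which keeps concurrent spawns safe. A tiny sketch of that idiom; mkdir_fn is a hypothetical callable standing in for the datastore MakeDirectory RPC, and FileExistsError stands in for the corresponding vSphere fault.

    def create_folder_if_missing(datastore, path, mkdir_fn):
        """Create [datastore] path, tolerating a pre-existing folder."""
        try:
            mkdir_fn('[%s] %s' % (datastore, path))
        except FileExistsError:
            # Someone else (or an earlier boot) already created it; that's fine.
            pass

    # Stub that pretends the folder is already there:
    def fake_mkdir(full_path):
        raise FileExistsError(full_path)

    create_folder_if_missing('datastore1', 'devstack-image-cache_base', fake_mkdir)
    print('Folder [datastore1] devstack-image-cache_base present')
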
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1012.601124] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-804bed41-b9ac-4d93-a241-3213c81d23c2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.608201] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Waiting for the task: (returnval){ [ 1012.608201] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526c21a7-e8f9-1f14-434e-60b6a7840720" [ 1012.608201] env[63538]: _type = "Task" [ 1012.608201] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.620689] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526c21a7-e8f9-1f14-434e-60b6a7840720, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.696973] env[63538]: DEBUG oslo_concurrency.lockutils [req-0417fcfb-9343-4b58-a3f7-52d89d7f497f req-e7e2a9f5-9d81-4cb7-a877-66c37e8456f8 service nova] Releasing lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.815929] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e7bf33-0aa5-4ed6-8f56-77dd8dee5d36 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.824082] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a62082e-d58b-41db-bce7-ad14e5cb9927 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.855487] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8277de8c-2e4a-416e-9df2-464f16e5ffe9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.871977] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3cfbdd-9768-439e-aac4-196cf7104ce4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.876272] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101436, 'name': Rename_Task, 'duration_secs': 0.257649} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.876453] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1012.877165] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4ed3be0-2b12-4009-9eea-f86972a3687d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.886966] env[63538]: DEBUG nova.compute.provider_tree [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.893515] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Waiting for the task: (returnval){ [ 1012.893515] env[63538]: value = "task-5101438" [ 1012.893515] env[63538]: _type = "Task" [ 1012.893515] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.909152] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101438, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.030545] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "refresh_cache-0a0d0372-dede-4df0-bb9e-231e8a5b3742" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.031166] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "refresh_cache-0a0d0372-dede-4df0-bb9e-231e8a5b3742" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.031714] env[63538]: DEBUG nova.network.neutron [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1013.054774] env[63538]: DEBUG oslo_vmware.api [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101437, 'name': ReconfigVM_Task, 'duration_secs': 0.235624} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.055115] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Reconfigured VM instance instance-00000048 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1013.060251] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d069a349-dcde-4421-97ef-301b80829a0e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.076967] env[63538]: DEBUG nova.network.neutron [req-75aad3e6-0aec-4749-8507-76846cea477c req-ac30b9d8-b52f-4a27-9c0b-5374dcdb2644 service nova] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Updated VIF entry in instance network info cache for port 7bb10fc8-2099-48a7-bad4-846efd238d94. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1013.077696] env[63538]: DEBUG nova.network.neutron [req-75aad3e6-0aec-4749-8507-76846cea477c req-ac30b9d8-b52f-4a27-9c0b-5374dcdb2644 service nova] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Updating instance_info_cache with network_info: [{"id": "7bb10fc8-2099-48a7-bad4-846efd238d94", "address": "fa:16:3e:47:4a:58", "network": {"id": "9b47978c-2396-444f-8206-c5391fc33cd8", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-784107468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e0c099142ba469f8d4e5cf2fa56d03a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a3f99df-d1bc-4a37-a048-263445d4a7b0", "external-id": "nsx-vlan-transportzone-374", "segmentation_id": 374, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bb10fc8-20", "ovs_interfaceid": "7bb10fc8-2099-48a7-bad4-846efd238d94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.081674] env[63538]: DEBUG oslo_vmware.api [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 1013.081674] env[63538]: value = "task-5101439" [ 1013.081674] env[63538]: _type = "Task" [ 1013.081674] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.095746] env[63538]: DEBUG oslo_vmware.api [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101439, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.121187] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526c21a7-e8f9-1f14-434e-60b6a7840720, 'name': SearchDatastore_Task, 'duration_secs': 0.013585} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.122041] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf07a979-1472-4d3f-824f-e02f0a969379 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.128510] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Waiting for the task: (returnval){ [ 1013.128510] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c075a1-016e-bbce-3c53-a64ae3b29fea" [ 1013.128510] env[63538]: _type = "Task" [ 1013.128510] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.138364] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c075a1-016e-bbce-3c53-a64ae3b29fea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.390665] env[63538]: DEBUG nova.scheduler.client.report [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1013.407065] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101438, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.586252] env[63538]: DEBUG oslo_concurrency.lockutils [req-75aad3e6-0aec-4749-8507-76846cea477c req-ac30b9d8-b52f-4a27-9c0b-5374dcdb2644 service nova] Releasing lock "refresh_cache-144df97e-f47b-4ead-8243-345d98b9f3e6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.588357] env[63538]: DEBUG nova.network.neutron [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Instance cache missing network info. 
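[editor's note] The inventory record repeated above for provider f65218a4-... implies schedulable capacity of (total - reserved) * allocation_ratio per resource class, with max_unit capping any single allocation. A worked sketch using the exact numbers from the log; the helper name is illustrative, the arithmetic is just what the inventory fields imply.

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 95},
    }

    def schedulable_capacity(inv):
        """Total capacity the scheduler can hand out, per resource class."""
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    # VCPU: (48-0)*4.0 = 192.0, MEMORY_MB: (196590-512)*1.0 = 196078.0, DISK_GB: 200.0
    print(schedulable_capacity(inventory))
    # A single instance is still capped by max_unit, e.g. at most 16 VCPU or 95 GB of disk.
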
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1013.596375] env[63538]: DEBUG oslo_vmware.api [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101439, 'name': ReconfigVM_Task, 'duration_secs': 0.171888} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.596915] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992456', 'volume_id': '29ea8eee-32fd-404e-9a51-9211e8b1a496', 'name': 'volume-29ea8eee-32fd-404e-9a51-9211e8b1a496', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'edcc5700-7b1e-494a-82d1-844373a9d5a6', 'attached_at': '', 'detached_at': '', 'volume_id': '29ea8eee-32fd-404e-9a51-9211e8b1a496', 'serial': '29ea8eee-32fd-404e-9a51-9211e8b1a496'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1013.642620] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c075a1-016e-bbce-3c53-a64ae3b29fea, 'name': SearchDatastore_Task, 'duration_secs': 0.014744} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.643528] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.643964] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 144df97e-f47b-4ead-8243-345d98b9f3e6/144df97e-f47b-4ead-8243-345d98b9f3e6.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1013.644530] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9c4a3ff-b3f5-40cf-a280-cd50007dce3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.652688] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Waiting for the task: (returnval){ [ 1013.652688] env[63538]: value = "task-5101440" [ 1013.652688] env[63538]: _type = "Task" [ 1013.652688] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.664331] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101440, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.845361] env[63538]: DEBUG nova.network.neutron [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Updating instance_info_cache with network_info: [{"id": "bcfdefa5-ccee-41b2-8eeb-88554605c6e3", "address": "fa:16:3e:44:5e:da", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcfdefa5-cc", "ovs_interfaceid": "bcfdefa5-ccee-41b2-8eeb-88554605c6e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.899659] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.997s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.901938] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 20.582s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.914026] env[63538]: DEBUG oslo_vmware.api [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101438, 'name': PowerOnVM_Task, 'duration_secs': 0.995256} completed successfully. 
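[editor's note] The "compute_resources" lock lines above (held 1.997s, waited 19.088s and 20.582s) show every resource-tracker update on this host serializing on a single named lock, which is why unrelated requests queue behind a slow usage update. A minimal illustration with oslo.concurrency's synchronized decorator; update_usage here is a stand-in function, not the ResourceTracker method itself.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid, delta):
        """Anything that mutates the host's tracked usage runs under the
        same named lock, so concurrent updates apply one at a time."""
        # ... read current usage, apply delta, write it back ...
        return {'instance': instance_uuid, 'applied': delta}

    print(update_usage('f5d92749-04d6-4935-8dc6-afb692222df0', {'vcpus': -1}))
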
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.914026] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1013.914026] env[63538]: INFO nova.compute.manager [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Took 9.39 seconds to spawn the instance on the hypervisor. [ 1013.914026] env[63538]: DEBUG nova.compute.manager [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1013.914026] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685868cd-5586-4d0c-88cf-04020debadcd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.930382] env[63538]: INFO nova.scheduler.client.report [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted allocations for instance f5d92749-04d6-4935-8dc6-afb692222df0 [ 1014.075878] env[63538]: DEBUG nova.compute.manager [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Received event network-vif-plugged-bcfdefa5-ccee-41b2-8eeb-88554605c6e3 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1014.076317] env[63538]: DEBUG oslo_concurrency.lockutils [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] Acquiring lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.076738] env[63538]: DEBUG oslo_concurrency.lockutils [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] Lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.078145] env[63538]: DEBUG oslo_concurrency.lockutils [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] Lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.078624] env[63538]: DEBUG nova.compute.manager [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] No waiting events found dispatching network-vif-plugged-bcfdefa5-ccee-41b2-8eeb-88554605c6e3 {{(pid=63538) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1014.078810] env[63538]: WARNING nova.compute.manager [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Received unexpected event network-vif-plugged-bcfdefa5-ccee-41b2-8eeb-88554605c6e3 for instance with vm_state building and task_state spawning. [ 1014.079673] env[63538]: DEBUG nova.compute.manager [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Received event network-changed-bcfdefa5-ccee-41b2-8eeb-88554605c6e3 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1014.079975] env[63538]: DEBUG nova.compute.manager [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Refreshing instance network info cache due to event network-changed-bcfdefa5-ccee-41b2-8eeb-88554605c6e3. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1014.080441] env[63538]: DEBUG oslo_concurrency.lockutils [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] Acquiring lock "refresh_cache-0a0d0372-dede-4df0-bb9e-231e8a5b3742" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.161610] env[63538]: DEBUG nova.objects.instance [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lazy-loading 'flavor' on Instance uuid edcc5700-7b1e-494a-82d1-844373a9d5a6 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.179612] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101440, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.348454] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "refresh_cache-0a0d0372-dede-4df0-bb9e-231e8a5b3742" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.348799] env[63538]: DEBUG nova.compute.manager [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Instance network_info: |[{"id": "bcfdefa5-ccee-41b2-8eeb-88554605c6e3", "address": "fa:16:3e:44:5e:da", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcfdefa5-cc", "ovs_interfaceid": "bcfdefa5-ccee-41b2-8eeb-88554605c6e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1014.349158] env[63538]: DEBUG oslo_concurrency.lockutils [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] Acquired lock "refresh_cache-0a0d0372-dede-4df0-bb9e-231e8a5b3742" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.349312] env[63538]: DEBUG nova.network.neutron [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Refreshing network info cache for port bcfdefa5-ccee-41b2-8eeb-88554605c6e3 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1014.350593] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:5e:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcfdefa5-ccee-41b2-8eeb-88554605c6e3', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1014.358516] env[63538]: DEBUG oslo.service.loopingcall [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 
tempest-DeleteServersTestJSON-312874781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1014.361528] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1014.362055] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e9221d3-296c-4b9f-835b-80f58f416784 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.384900] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1014.384900] env[63538]: value = "task-5101441" [ 1014.384900] env[63538]: _type = "Task" [ 1014.384900] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.393771] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101441, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.436380] env[63538]: INFO nova.compute.manager [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Took 31.81 seconds to build instance. [ 1014.441660] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9c7b18b5-8a02-40f6-8e86-d2c08d46b9dc tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "f5d92749-04d6-4935-8dc6-afb692222df0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.696s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.676031] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101440, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.773882] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Acquiring lock "4f81dc4e-2092-4a2c-a511-589d47d118b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.787845] env[63538]: DEBUG nova.network.neutron [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Updated VIF entry in instance network info cache for port bcfdefa5-ccee-41b2-8eeb-88554605c6e3. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1014.788290] env[63538]: DEBUG nova.network.neutron [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Updating instance_info_cache with network_info: [{"id": "bcfdefa5-ccee-41b2-8eeb-88554605c6e3", "address": "fa:16:3e:44:5e:da", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcfdefa5-cc", "ovs_interfaceid": "bcfdefa5-ccee-41b2-8eeb-88554605c6e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.897070] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101441, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.938707] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b60e5752-9d1c-47a6-8420-a43a98788ab3 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Lock "4f81dc4e-2092-4a2c-a511-589d47d118b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.334s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.941425] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Lock "4f81dc4e-2092-4a2c-a511-589d47d118b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.166s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.941425] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Acquiring lock "4f81dc4e-2092-4a2c-a511-589d47d118b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.941425] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Lock "4f81dc4e-2092-4a2c-a511-589d47d118b6-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.941425] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Lock "4f81dc4e-2092-4a2c-a511-589d47d118b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.943086] env[63538]: INFO nova.compute.manager [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Terminating instance [ 1014.945792] env[63538]: DEBUG nova.compute.manager [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1014.946176] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1014.947473] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance ede967c0-ec3a-4f26-8290-0ee36890cd75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1014.947660] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 466be7db-79e4-49fd-aa3b-56fbe5c60457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1014.947831] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 87f8bb3e-6f32-4850-ac54-efad0befb268 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1014.948093] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance de68a921-bf67-4794-923d-4e062d8ff802 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1014.948263] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance edcc5700-7b1e-494a-82d1-844373a9d5a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1014.949213] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 90e56075-0d77-467f-90be-913315b63b33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1014.949433] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 8ed0bd15-71fc-435e-9e4a-90b023ad8a79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1014.949630] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 4ec5d3a2-8b29-4074-b323-f94704043b8b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1014.949797] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 209c5f46-9c63-4f55-bc75-bc2e4da989ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1014.950480] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 0339c969-ad97-47b1-8fab-ee595738d9df is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1014.950811] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance fb26fb32-a420-4667-850c-e32786edd8f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1014.951009] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 431a67e6-b90d-4930-9a86-7c49d1022ddc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1014.951177] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 3d80dc17-e330-4575-8e12-e06d8e76274a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1014.953480] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329f0078-25c1-4b8e-a5a7-275a8a5b071f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.966272] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1014.967406] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-733f20a8-f4e0-4f16-a099-e165506df1d1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.976954] env[63538]: DEBUG oslo_vmware.api [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Waiting for the task: (returnval){ [ 1014.976954] env[63538]: value = "task-5101442" [ 1014.976954] env[63538]: _type = "Task" [ 1014.976954] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.987464] env[63538]: DEBUG oslo_vmware.api [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101442, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.170492] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101440, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.179635] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5f3aef65-4aa8-402c-b64b-caf199cb4e10 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.347s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.294166] env[63538]: DEBUG oslo_concurrency.lockutils [req-57b9877e-f7ad-4823-aca9-1357b804a247 req-9d660273-42be-495d-b150-c04566ccd5ce service nova] Releasing lock "refresh_cache-0a0d0372-dede-4df0-bb9e-231e8a5b3742" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.397946] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101441, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.462102] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 0df15328-aebd-44c5-9c78-ee05f188ad95 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1015.462102] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 4f81dc4e-2092-4a2c-a511-589d47d118b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1015.462102] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 144df97e-f47b-4ead-8243-345d98b9f3e6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1015.462102] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 0a0d0372-dede-4df0-bb9e-231e8a5b3742 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1015.490394] env[63538]: DEBUG oslo_vmware.api [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101442, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.674407] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101440, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.576709} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.675209] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 144df97e-f47b-4ead-8243-345d98b9f3e6/144df97e-f47b-4ead-8243-345d98b9f3e6.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1015.675561] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1015.676274] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-98dc2a88-5c60-4123-9067-2f8ce27270f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.685461] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Waiting for the task: (returnval){ [ 1015.685461] env[63538]: value = "task-5101443" [ 1015.685461] env[63538]: _type = "Task" [ 1015.685461] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.694910] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101443, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.900169] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101441, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.965041] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 148790a7-0a35-4d26-ae9f-6f954a161c88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1015.991039] env[63538]: DEBUG oslo_vmware.api [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101442, 'name': PowerOffVM_Task, 'duration_secs': 0.680453} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.991039] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1015.991039] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1015.991039] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d077fa39-1b8a-4354-b807-61b93873efd6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.057022] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1016.057022] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1016.057194] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Deleting the datastore file [datastore2] 4f81dc4e-2092-4a2c-a511-589d47d118b6 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1016.057442] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ece9e77-5e29-4be4-a855-8c12e99e3473 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.066021] env[63538]: DEBUG oslo_vmware.api [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Waiting for the task: (returnval){ [ 1016.066021] env[63538]: value = "task-5101445" [ 1016.066021] env[63538]: _type = "Task" [ 1016.066021] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.081881] env[63538]: DEBUG oslo_vmware.api [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101445, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.154260] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "7752c64f-693f-4cf3-951c-7ee0657f1682" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.154562] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "7752c64f-693f-4cf3-951c-7ee0657f1682" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.196452] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101443, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089971} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.196763] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1016.197597] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5da46df-a128-4690-8ec2-f084352b9682 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.223335] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 144df97e-f47b-4ead-8243-345d98b9f3e6/144df97e-f47b-4ead-8243-345d98b9f3e6.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1016.224148] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-846cc98e-0c8d-4e4c-b63e-b2f49ded497f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.245858] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Waiting for the task: (returnval){ [ 1016.245858] env[63538]: value = "task-5101446" [ 1016.245858] env[63538]: _type = "Task" [ 1016.245858] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.258189] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101446, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.398913] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.399197] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.399416] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "edcc5700-7b1e-494a-82d1-844373a9d5a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.399612] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.399796] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.401810] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101441, 'name': CreateVM_Task, 'duration_secs': 1.647333} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.401912] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1016.402349] env[63538]: INFO nova.compute.manager [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Terminating instance [ 1016.404236] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.404403] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.404739] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1016.405399] env[63538]: DEBUG nova.compute.manager [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1016.405593] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1016.405867] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5b864a4-1c1f-41a0-94c6-e333f7454c18 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.408393] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8352e04a-95f0-4d43-a2ac-b0409098a9dd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.415059] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1016.415059] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5213e813-7693-6e49-5d1b-bbd691ee1a54" [ 1016.415059] env[63538]: _type = "Task" [ 1016.415059] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.418284] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1016.421618] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7dd4a5cc-2da0-4473-96de-93d436a57a44 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.429444] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5213e813-7693-6e49-5d1b-bbd691ee1a54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.430915] env[63538]: DEBUG oslo_vmware.api [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 1016.430915] env[63538]: value = "task-5101447" [ 1016.430915] env[63538]: _type = "Task" [ 1016.430915] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.439482] env[63538]: DEBUG oslo_vmware.api [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101447, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.467718] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1016.468019] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1016.468246] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3200MB phys_disk=100GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '14', 'num_vm_active': '10', 'num_task_None': '11', 'num_os_type_None': '14', 'num_proj_ea05f3fb4676466bb2a286f5a2fefb8f': '1', 'io_workload': '3', 'num_proj_0d6954a5254f441ca256c85330297cef': '2', 'num_proj_55edcd65da7b4a569a4c27aab4819cde': '3', 'num_vm_rescued': '1', 'num_proj_422f50dc66ec48b7b262643390072f3d': '1', 'num_proj_7063c42297c24f2baf7271fa25dec927': '1', 'num_proj_df090f9a727d4cf4a0f466e27928bdc6': '1', 'num_proj_9427981aac124f6aa0c4d8d45b0ae917': '1', 'num_proj_1a06b7cc1ab24ba584bbe970e4fc5e81': '1', 'num_vm_building': '3', 'num_task_spawning': '3', 'num_proj_7f252fb32fc84f23b6ace53cee7f877e': '1', 'num_proj_6e0c099142ba469f8d4e5cf2fa56d03a': '1', 'num_proj_492427e54e1048f292dab2abdac71af5': '1'} {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1016.581575] env[63538]: DEBUG oslo_vmware.api [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Task: {'id': task-5101445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.270897} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.581973] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1016.582205] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1016.582501] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1016.582781] env[63538]: INFO nova.compute.manager [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Took 1.64 seconds to destroy the instance on the hypervisor. 
[ 1016.583155] env[63538]: DEBUG oslo.service.loopingcall [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1016.583454] env[63538]: DEBUG nova.compute.manager [-] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1016.583648] env[63538]: DEBUG nova.network.neutron [-] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1016.656671] env[63538]: DEBUG nova.compute.manager [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1016.758941] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101446, 'name': ReconfigVM_Task, 'duration_secs': 0.359029} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.762529] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 144df97e-f47b-4ead-8243-345d98b9f3e6/144df97e-f47b-4ead-8243-345d98b9f3e6.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1016.763578] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72957379-5e45-4a04-b7da-14d6d3a616c7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.773207] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Waiting for the task: (returnval){ [ 1016.773207] env[63538]: value = "task-5101448" [ 1016.773207] env[63538]: _type = "Task" [ 1016.773207] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.789112] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101448, 'name': Rename_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.802540] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba0ccd6-1d4b-450e-a859-5a3ed7bed05c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.812624] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4a96ab-4918-4af3-a81c-f9625f35f1fa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.852552] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439b5621-5d87-456c-a233-4fed447343ba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.862523] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7d0f6d-4f67-4f4c-a511-940a941ba124 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.882975] env[63538]: DEBUG nova.compute.provider_tree [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.928357] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5213e813-7693-6e49-5d1b-bbd691ee1a54, 'name': SearchDatastore_Task, 'duration_secs': 0.017226} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.928712] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.928951] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1016.929215] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.929398] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.929663] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1016.929966] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-074d9e36-e834-4ca2-b62c-cd4cb2020bc2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.940825] env[63538]: DEBUG oslo_vmware.api [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101447, 'name': PowerOffVM_Task, 'duration_secs': 0.206026} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.941193] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1016.941317] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1016.941580] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5111958-c32f-4e49-b17a-6b44b1357683 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.944642] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1016.944860] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1016.946118] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4ff53bc-746f-4b2e-816a-3b19ba545d10 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.951705] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1016.951705] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5201dc45-ceda-3e0f-2ac4-19440c014312" [ 1016.951705] env[63538]: _type = "Task" [ 1016.951705] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.967043] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5201dc45-ceda-3e0f-2ac4-19440c014312, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.020119] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1017.020392] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1017.020587] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Deleting the datastore file [datastore2] edcc5700-7b1e-494a-82d1-844373a9d5a6 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1017.020882] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4ec0fd3-6091-4b75-bf25-acfeeefac075 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.030040] env[63538]: DEBUG oslo_vmware.api [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 1017.030040] env[63538]: value = "task-5101450" [ 1017.030040] env[63538]: _type = "Task" [ 1017.030040] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.040735] env[63538]: DEBUG oslo_vmware.api [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101450, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.087851] env[63538]: DEBUG nova.compute.manager [req-a6599433-36ed-4afb-9e39-d33c0c710bd3 req-b8211d5d-c69c-4b61-ac1a-8506f7a88762 service nova] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Received event network-vif-deleted-916b9a1d-5118-4f57-b7a9-9ca7fd8c9655 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1017.088144] env[63538]: INFO nova.compute.manager [req-a6599433-36ed-4afb-9e39-d33c0c710bd3 req-b8211d5d-c69c-4b61-ac1a-8506f7a88762 service nova] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Neutron deleted interface 916b9a1d-5118-4f57-b7a9-9ca7fd8c9655; detaching it from the instance and deleting it from the info cache [ 1017.088369] env[63538]: DEBUG nova.network.neutron [req-a6599433-36ed-4afb-9e39-d33c0c710bd3 req-b8211d5d-c69c-4b61-ac1a-8506f7a88762 service nova] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.186462] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.283465] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101448, 'name': Rename_Task, 'duration_secs': 0.173495} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.283799] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1017.284085] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e599b44-bcea-4790-9af1-54824d17cbe0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.290391] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Waiting for the task: (returnval){ [ 1017.290391] env[63538]: value = "task-5101451" [ 1017.290391] env[63538]: _type = "Task" [ 1017.290391] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.298374] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101451, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.385639] env[63538]: DEBUG nova.scheduler.client.report [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1017.463116] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5201dc45-ceda-3e0f-2ac4-19440c014312, 'name': SearchDatastore_Task, 'duration_secs': 0.030309} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.463979] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4052f9a4-8e45-4e9a-8d76-a9e8d6f185bd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.470211] env[63538]: DEBUG nova.network.neutron [-] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.471543] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1017.471543] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521eaeba-bf55-28dc-88c1-74a9607427eb" [ 1017.471543] env[63538]: _type = "Task" [ 1017.471543] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.480896] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521eaeba-bf55-28dc-88c1-74a9607427eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.542359] env[63538]: DEBUG oslo_vmware.api [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101450, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157672} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.542788] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1017.543100] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1017.543304] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1017.543490] env[63538]: INFO nova.compute.manager [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1017.544140] env[63538]: DEBUG oslo.service.loopingcall [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1017.544140] env[63538]: DEBUG nova.compute.manager [-] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1017.544140] env[63538]: DEBUG nova.network.neutron [-] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1017.591303] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55f6115f-b1b1-4ff1-80cb-cc4aaa21999e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.601813] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d40feb9-915c-48f7-b95f-aa592cceafa2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.644369] env[63538]: DEBUG nova.compute.manager [req-a6599433-36ed-4afb-9e39-d33c0c710bd3 req-b8211d5d-c69c-4b61-ac1a-8506f7a88762 service nova] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Detach interface failed, port_id=916b9a1d-5118-4f57-b7a9-9ca7fd8c9655, reason: Instance 4f81dc4e-2092-4a2c-a511-589d47d118b6 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1017.801847] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101451, 'name': PowerOnVM_Task} progress is 71%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.891401] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63538) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1017.891654] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.990s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.891955] env[63538]: DEBUG oslo_concurrency.lockutils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.726s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.893541] env[63538]: INFO nova.compute.claims [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1017.973057] env[63538]: INFO nova.compute.manager [-] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Took 1.39 seconds to deallocate network for instance. [ 1017.989836] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521eaeba-bf55-28dc-88c1-74a9607427eb, 'name': SearchDatastore_Task, 'duration_secs': 0.010158} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.992618] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1017.992618] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 0a0d0372-dede-4df0-bb9e-231e8a5b3742/0a0d0372-dede-4df0-bb9e-231e8a5b3742.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1017.992618] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-522dd648-1b04-44fe-81c2-36b7e968006a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.001538] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1018.001538] env[63538]: value = "task-5101452" [ 1018.001538] env[63538]: _type = "Task" [ 1018.001538] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.012311] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101452, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.305846] env[63538]: DEBUG oslo_vmware.api [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101451, 'name': PowerOnVM_Task, 'duration_secs': 0.9458} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.306181] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1018.306398] env[63538]: INFO nova.compute.manager [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Took 8.80 seconds to spawn the instance on the hypervisor. 
[ 1018.306591] env[63538]: DEBUG nova.compute.manager [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1018.307543] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6096660-8dee-4fa2-b162-bd7cfae0d25c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.482946] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.513658] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101452, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.518022] env[63538]: DEBUG nova.network.neutron [-] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.827338] env[63538]: INFO nova.compute.manager [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Took 33.13 seconds to build instance. [ 1019.014864] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101452, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.668804} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.015257] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 0a0d0372-dede-4df0-bb9e-231e8a5b3742/0a0d0372-dede-4df0-bb9e-231e8a5b3742.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1019.015516] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1019.015885] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c38ba379-d274-454a-8ee0-d47701d95c35 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.021010] env[63538]: INFO nova.compute.manager [-] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Took 1.48 seconds to deallocate network for instance. [ 1019.030119] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1019.030119] env[63538]: value = "task-5101453" [ 1019.030119] env[63538]: _type = "Task" [ 1019.030119] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.041703] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101453, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.115898] env[63538]: DEBUG nova.compute.manager [req-f93335b4-bece-4836-8464-e3c707362fec req-7fe960d9-47db-493c-b336-f18ac8b108e9 service nova] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Received event network-vif-deleted-cff0fa7b-e08d-4e54-b01a-4b6cc4fd0189 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1019.190436] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64efb43-7ba1-4a28-b2ad-e0ff18566742 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.201534] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95241c68-ada1-4cc6-8aff-ecaa7db957ed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.233794] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f670199-b913-4d30-a370-371d34706338 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.243076] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fc02f1-5c38-409b-a241-30b6e96ac13f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.259834] env[63538]: DEBUG nova.compute.provider_tree [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.331553] env[63538]: DEBUG oslo_concurrency.lockutils [None req-648a49c4-4077-4327-a531-b19849f13708 tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Lock "144df97e-f47b-4ead-8243-345d98b9f3e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.645s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.529638] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.540947] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101453, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076849} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.541274] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1019.542176] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af4b3ee-eb92-4c97-9ece-4de30bb3fd0d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.569663] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 0a0d0372-dede-4df0-bb9e-231e8a5b3742/0a0d0372-dede-4df0-bb9e-231e8a5b3742.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.570039] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-123092fe-5848-4042-8fb1-c9a36e76e6d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.591293] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1019.591293] env[63538]: value = "task-5101454" [ 1019.591293] env[63538]: _type = "Task" [ 1019.591293] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.601790] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101454, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.763801] env[63538]: DEBUG nova.scheduler.client.report [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1020.103322] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101454, 'name': ReconfigVM_Task, 'duration_secs': 0.319787} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.103701] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 0a0d0372-dede-4df0-bb9e-231e8a5b3742/0a0d0372-dede-4df0-bb9e-231e8a5b3742.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.104310] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9048b5ee-5222-4221-8cb4-b7aed5fd6fa2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.111277] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1020.111277] env[63538]: value = "task-5101455" [ 1020.111277] env[63538]: _type = "Task" [ 1020.111277] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.119912] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101455, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.268999] env[63538]: DEBUG oslo_concurrency.lockutils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.269593] env[63538]: DEBUG nova.compute.manager [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1020.272304] env[63538]: DEBUG oslo_concurrency.lockutils [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.479s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.272485] env[63538]: DEBUG nova.objects.instance [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63538) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1020.622760] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101455, 'name': Rename_Task, 'duration_secs': 0.149622} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.623116] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1020.623392] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d91a4e0-9760-4f57-9d71-c09282c178d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.632060] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1020.632060] env[63538]: value = "task-5101456" [ 1020.632060] env[63538]: _type = "Task" [ 1020.632060] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.641540] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101456, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.777795] env[63538]: DEBUG nova.compute.utils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1020.782247] env[63538]: DEBUG nova.compute.manager [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Not allocating networking since 'none' was specified. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1021.145192] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101456, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.283015] env[63538]: DEBUG nova.compute.manager [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1021.286831] env[63538]: DEBUG oslo_concurrency.lockutils [None req-70b27aca-3d5b-4654-a373-097f6c0ecb0c tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.289052] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.709s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.289052] env[63538]: DEBUG nova.objects.instance [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lazy-loading 'pci_requests' on Instance uuid 0df15328-aebd-44c5-9c78-ee05f188ad95 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.643340] env[63538]: DEBUG oslo_vmware.api [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101456, 'name': PowerOnVM_Task, 'duration_secs': 0.558304} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.643657] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1021.643892] env[63538]: INFO nova.compute.manager [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Took 9.63 seconds to spawn the instance on the hypervisor. 
[ 1021.644094] env[63538]: DEBUG nova.compute.manager [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1021.644901] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b817b89-c6f6-400f-8710-950e8e7b7ef1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.794020] env[63538]: DEBUG nova.objects.instance [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lazy-loading 'numa_topology' on Instance uuid 0df15328-aebd-44c5-9c78-ee05f188ad95 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.163692] env[63538]: INFO nova.compute.manager [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Took 31.33 seconds to build instance. [ 1022.297883] env[63538]: DEBUG nova.compute.manager [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1022.300966] env[63538]: INFO nova.compute.claims [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1022.329160] env[63538]: DEBUG nova.virt.hardware [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1022.329463] env[63538]: DEBUG nova.virt.hardware [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1022.329643] env[63538]: DEBUG nova.virt.hardware [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 
tempest-ServersListShow296Test-1140598576-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1022.329838] env[63538]: DEBUG nova.virt.hardware [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1022.330032] env[63538]: DEBUG nova.virt.hardware [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1022.330200] env[63538]: DEBUG nova.virt.hardware [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1022.330433] env[63538]: DEBUG nova.virt.hardware [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1022.330619] env[63538]: DEBUG nova.virt.hardware [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1022.330815] env[63538]: DEBUG nova.virt.hardware [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1022.330999] env[63538]: DEBUG nova.virt.hardware [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1022.331207] env[63538]: DEBUG nova.virt.hardware [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1022.332114] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065ccad0-fc49-4f7a-b31f-19a0031da09d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.341763] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57743742-1e9b-427d-a23c-84acb0db810a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.357494] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None 
req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1022.363068] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Creating folder: Project (ada45afdde2d466f8fcc245c5f4cea19). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1022.363355] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3c79eb1-db4a-4468-8957-4ee51bc75565 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.374568] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Created folder: Project (ada45afdde2d466f8fcc245c5f4cea19) in parent group-v992234. [ 1022.374773] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Creating folder: Instances. Parent ref: group-v992479. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1022.375034] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-336db5d9-ad79-48ba-9dee-16f29d151f77 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.384792] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Created folder: Instances in parent group-v992479. [ 1022.385245] env[63538]: DEBUG oslo.service.loopingcall [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1022.385316] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1022.385564] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a73a9c0c-0ea4-4973-b79e-6afc7eb22343 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.404151] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1022.404151] env[63538]: value = "task-5101459" [ 1022.404151] env[63538]: _type = "Task" [ 1022.404151] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.404632] env[63538]: DEBUG oslo_concurrency.lockutils [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Acquiring lock "144df97e-f47b-4ead-8243-345d98b9f3e6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.404885] env[63538]: DEBUG oslo_concurrency.lockutils [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Lock "144df97e-f47b-4ead-8243-345d98b9f3e6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.405110] env[63538]: DEBUG oslo_concurrency.lockutils [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Acquiring lock "144df97e-f47b-4ead-8243-345d98b9f3e6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.405303] env[63538]: DEBUG oslo_concurrency.lockutils [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Lock "144df97e-f47b-4ead-8243-345d98b9f3e6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.405475] env[63538]: DEBUG oslo_concurrency.lockutils [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Lock "144df97e-f47b-4ead-8243-345d98b9f3e6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.410432] env[63538]: INFO nova.compute.manager [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Terminating instance [ 1022.412571] env[63538]: DEBUG nova.compute.manager [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1022.412780] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1022.413654] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b076013-e3d6-40fb-bd72-47c92a3a748d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.420714] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101459, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.424995] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1022.425275] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0f856fd-6978-46e8-a911-c65fedad344c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.430988] env[63538]: DEBUG oslo_vmware.api [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Waiting for the task: (returnval){ [ 1022.430988] env[63538]: value = "task-5101460" [ 1022.430988] env[63538]: _type = "Task" [ 1022.430988] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.442022] env[63538]: DEBUG oslo_vmware.api [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101460, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.666349] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f3ee3e40-fed9-402a-ad26-ca0723951a86 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.849s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.916258] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101459, 'name': CreateVM_Task, 'duration_secs': 0.349013} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.917188] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1022.917752] env[63538]: DEBUG oslo_concurrency.lockutils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.917887] env[63538]: DEBUG oslo_concurrency.lockutils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.918270] env[63538]: DEBUG oslo_concurrency.lockutils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1022.918803] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59ba0de2-115d-45e8-bddb-24d120d70e9b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.926772] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1022.926772] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e08e4a-c779-0f32-f75b-ee8867664791" [ 1022.926772] env[63538]: _type = "Task" [ 1022.926772] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.940894] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e08e4a-c779-0f32-f75b-ee8867664791, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.947924] env[63538]: DEBUG oslo_vmware.api [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101460, 'name': PowerOffVM_Task, 'duration_secs': 0.219493} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.948721] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1022.948996] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1022.950030] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74c892a2-c885-4ff1-93bb-421740b6ded3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.018544] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1023.018843] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1023.019072] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Deleting the datastore file [datastore1] 144df97e-f47b-4ead-8243-345d98b9f3e6 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1023.020165] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-062dfcd7-ec9b-46cc-afda-09d84297653b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.028387] env[63538]: DEBUG oslo_vmware.api [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Waiting for the task: (returnval){ [ 1023.028387] env[63538]: value = "task-5101462" [ 1023.028387] env[63538]: _type = "Task" [ 1023.028387] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.049333] env[63538]: DEBUG oslo_vmware.api [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101462, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.405968] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0ad35f34-f51b-4076-a030-26ab3d176daa tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.406266] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0ad35f34-f51b-4076-a030-26ab3d176daa tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.406387] env[63538]: DEBUG nova.compute.manager [None req-0ad35f34-f51b-4076-a030-26ab3d176daa tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1023.407343] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2312f1-681d-449e-8ba8-536b1c29fd98 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.414731] env[63538]: DEBUG nova.compute.manager [None req-0ad35f34-f51b-4076-a030-26ab3d176daa tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63538) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1023.415322] env[63538]: DEBUG nova.objects.instance [None req-0ad35f34-f51b-4076-a030-26ab3d176daa tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lazy-loading 'flavor' on Instance uuid 0a0d0372-dede-4df0-bb9e-231e8a5b3742 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1023.439830] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e08e4a-c779-0f32-f75b-ee8867664791, 'name': SearchDatastore_Task, 'duration_secs': 0.012392} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.440135] env[63538]: DEBUG oslo_concurrency.lockutils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.440368] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1023.441040] env[63538]: DEBUG oslo_concurrency.lockutils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.441040] env[63538]: DEBUG oslo_concurrency.lockutils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.441040] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1023.442341] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18dbefba-27a7-406b-80d3-b7ac4f4dcdc9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.452101] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1023.452305] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1023.453062] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45c6c725-586e-47da-8717-ef41f7b12e48 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.458860] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1023.458860] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e1ff4d-06c8-1f76-4801-6fc2a9692ded" [ 1023.458860] env[63538]: _type = "Task" [ 1023.458860] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.468693] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e1ff4d-06c8-1f76-4801-6fc2a9692ded, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.540704] env[63538]: DEBUG oslo_vmware.api [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Task: {'id': task-5101462, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147459} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.543454] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1023.543706] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1023.543942] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1023.544147] env[63538]: INFO nova.compute.manager [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1023.544402] env[63538]: DEBUG oslo.service.loopingcall [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1023.544814] env[63538]: DEBUG nova.compute.manager [-] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1023.544916] env[63538]: DEBUG nova.network.neutron [-] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1023.567479] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5422c744-6d94-4ce5-86b4-8440bb180a09 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.577552] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7080583c-7f70-47f7-85f0-b45f9f31fe21 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.614623] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507b657e-c904-4814-8d20-d6ba9b595a83 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.624054] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382e8e58-9f7f-45e1-9166-54354436bffb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.639137] env[63538]: DEBUG nova.compute.provider_tree [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.811117] env[63538]: DEBUG nova.compute.manager [req-bfa1f5f5-28d7-41a4-b2b8-0c27348b2b4c req-0669123b-e2b3-4b19-957f-35bcc5787560 service nova] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Received event network-vif-deleted-7bb10fc8-2099-48a7-bad4-846efd238d94 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1023.811328] env[63538]: INFO nova.compute.manager [req-bfa1f5f5-28d7-41a4-b2b8-0c27348b2b4c req-0669123b-e2b3-4b19-957f-35bcc5787560 service nova] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Neutron deleted interface 7bb10fc8-2099-48a7-bad4-846efd238d94; detaching it from the instance and deleting it from the info cache [ 1023.811503] env[63538]: DEBUG nova.network.neutron [req-bfa1f5f5-28d7-41a4-b2b8-0c27348b2b4c req-0669123b-e2b3-4b19-957f-35bcc5787560 service nova] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.920843] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ad35f34-f51b-4076-a030-26ab3d176daa tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1023.921203] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-edc431ef-8571-4af5-9887-7e8ebcab36b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.929215] env[63538]: DEBUG oslo_vmware.api [None req-0ad35f34-f51b-4076-a030-26ab3d176daa tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1023.929215] env[63538]: value = "task-5101463" [ 1023.929215] env[63538]: _type = "Task" [ 1023.929215] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.938849] env[63538]: DEBUG oslo_vmware.api [None req-0ad35f34-f51b-4076-a030-26ab3d176daa tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101463, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.969759] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e1ff4d-06c8-1f76-4801-6fc2a9692ded, 'name': SearchDatastore_Task, 'duration_secs': 0.009276} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.970616] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1b1314b-8b7d-4314-958e-b8d28451423b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.976820] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1023.976820] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5282b459-df55-c214-b05b-e45b9e603d91" [ 1023.976820] env[63538]: _type = "Task" [ 1023.976820] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.985037] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5282b459-df55-c214-b05b-e45b9e603d91, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.142288] env[63538]: DEBUG nova.scheduler.client.report [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1024.288535] env[63538]: DEBUG nova.network.neutron [-] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.314906] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5de12ea2-99d6-47c0-b942-a2dbd4c60160 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.326148] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83345f9-1651-40f8-85fb-d7b0c479ef4b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.362813] env[63538]: DEBUG nova.compute.manager [req-bfa1f5f5-28d7-41a4-b2b8-0c27348b2b4c req-0669123b-e2b3-4b19-957f-35bcc5787560 service nova] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Detach interface failed, port_id=7bb10fc8-2099-48a7-bad4-846efd238d94, reason: Instance 144df97e-f47b-4ead-8243-345d98b9f3e6 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1024.439417] env[63538]: DEBUG oslo_vmware.api [None req-0ad35f34-f51b-4076-a030-26ab3d176daa tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101463, 'name': PowerOffVM_Task, 'duration_secs': 0.209154} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.439741] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ad35f34-f51b-4076-a030-26ab3d176daa tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1024.439888] env[63538]: DEBUG nova.compute.manager [None req-0ad35f34-f51b-4076-a030-26ab3d176daa tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1024.440717] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3254bc-6aa5-441d-b0a5-0f8d75e44064 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.487301] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5282b459-df55-c214-b05b-e45b9e603d91, 'name': SearchDatastore_Task, 'duration_secs': 0.014686} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.487618] env[63538]: DEBUG oslo_concurrency.lockutils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.487881] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 148790a7-0a35-4d26-ae9f-6f954a161c88/148790a7-0a35-4d26-ae9f-6f954a161c88.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1024.488176] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55b03972-c5d3-4c82-9204-e9479526e7ba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.494874] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1024.494874] env[63538]: value = "task-5101464" [ 1024.494874] env[63538]: _type = "Task" [ 1024.494874] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.503500] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101464, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.647646] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.359s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.649934] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.802s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.650167] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.652067] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.463s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.653637] env[63538]: INFO nova.compute.claims [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1024.683237] env[63538]: INFO nova.scheduler.client.report [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleted allocations for instance 0339c969-ad97-47b1-8fab-ee595738d9df [ 1024.699177] env[63538]: INFO nova.network.neutron [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Updating port 8a332a90-393f-41ae-a924-4959c06e6207 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1024.791231] env[63538]: INFO nova.compute.manager [-] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Took 1.25 seconds to deallocate network for instance. 
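The entries above trace the spawn path serializing on a per-image lock ("[datastore2] devstack-image-cache_base/faabbca4-...") before it searches the datastore, creates the cache folder if missing, and copies the cached VMDK into the instance directory. Below is a minimal illustrative sketch of that populate-the-cache-once pattern, not Nova's actual code: it assumes oslo.concurrency's lockutils.lock context manager is available, and CACHE_ROOT plus _fetch_image_to_cache are invented placeholders standing in for the datastore and the expensive image fetch.

import os
from oslo_concurrency import lockutils

# Placeholder "datastore" for the sketch: a plain directory on disk.
CACHE_ROOT = "/tmp/image-cache"


def _cached_path(image_id):
    return os.path.join(CACHE_ROOT, image_id, image_id + ".vmdk")


def _fetch_image_to_cache(image_id):
    """Placeholder for the expensive download/convert step."""
    path = _cached_path(image_id)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, "wb") as f:
        f.write(b"fake disk contents for %s" % image_id.encode())


def ensure_cached_image(image_id):
    """Populate the per-image cache at most once, even under concurrency.

    Mirrors the per-image lock seen in the log: every spawn of the same
    image serializes on one named lock, checks whether the cached disk
    already exists, and only the first caller fetches it.
    """
    with lockutils.lock("image-cache-%s" % image_id):
        if not os.path.exists(_cached_path(image_id)):
            _fetch_image_to_cache(image_id)
    return _cached_path(image_id)


if __name__ == "__main__":
    print(ensure_cached_image("faabbca4-e27b-433a-b93d-f059fd73bc92"))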
[ 1024.954132] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0ad35f34-f51b-4076-a030-26ab3d176daa tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.548s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.006061] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101464, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.190793] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8f65feb-a1cf-4a49-9b1b-c75d178b48a1 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "0339c969-ad97-47b1-8fab-ee595738d9df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.716s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.298509] env[63538]: DEBUG oslo_concurrency.lockutils [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.506847] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101464, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722297} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.507276] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 148790a7-0a35-4d26-ae9f-6f954a161c88/148790a7-0a35-4d26-ae9f-6f954a161c88.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1025.507403] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1025.507651] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf387655-945f-401d-9226-7868c62fda3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.516653] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1025.516653] env[63538]: value = "task-5101465" [ 1025.516653] env[63538]: _type = "Task" [ 1025.516653] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.528890] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101465, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.864087] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.864367] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.864579] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.864767] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.864941] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.867345] env[63538]: INFO nova.compute.manager [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Terminating instance [ 1025.869168] env[63538]: DEBUG nova.compute.manager [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1025.869372] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1025.870206] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbfa7c1d-c671-43ec-be01-3e46eb32477c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.882744] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1025.883182] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28464d62-44c7-4637-af5e-e8b2df3b3cd2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.930616] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9089139c-3f41-4266-9bbc-6cbd89d9db79 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.938773] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47f072e-eb12-4e39-90be-b9631707d5fa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.973374] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f95b7b2-2513-47dd-9665-cb10e839bb14 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.976223] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1025.976438] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1025.976624] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleting the datastore file [datastore1] 0a0d0372-dede-4df0-bb9e-231e8a5b3742 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1025.976877] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2253f129-d234-4a14-948d-3e66a09426bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.984845] env[63538]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e2cd5b-e459-4a84-8285-94fcdc81a499 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.988663] env[63538]: DEBUG oslo_vmware.api [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1025.988663] env[63538]: value = "task-5101467" [ 1025.988663] env[63538]: _type = "Task" [ 1025.988663] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.002765] env[63538]: DEBUG nova.compute.provider_tree [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.010405] env[63538]: DEBUG oslo_vmware.api [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101467, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.018230] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "90e56075-0d77-467f-90be-913315b63b33" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.018230] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "90e56075-0d77-467f-90be-913315b63b33" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.018390] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "90e56075-0d77-467f-90be-913315b63b33-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.018656] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "90e56075-0d77-467f-90be-913315b63b33-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.018739] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "90e56075-0d77-467f-90be-913315b63b33-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.024260] env[63538]: INFO nova.compute.manager [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Terminating instance [ 1026.027112] env[63538]: DEBUG nova.compute.manager [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1026.027328] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1026.028254] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34427f66-c620-4791-abb4-a73d75b7bf67 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.034414] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101465, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066061} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.035137] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1026.036011] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db37b536-c10d-4a3c-9b96-5f41ff9087aa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.041384] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1026.042013] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcd80020-8327-482b-b267-f1b15b7c1cba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.061470] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 148790a7-0a35-4d26-ae9f-6f954a161c88/148790a7-0a35-4d26-ae9f-6f954a161c88.vmdk or device None with type sparse {{(pid=63538) 
attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1026.063494] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad83d06d-fcc4-4990-89a5-97ee5c4f150c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.080306] env[63538]: DEBUG oslo_vmware.api [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1026.080306] env[63538]: value = "task-5101468" [ 1026.080306] env[63538]: _type = "Task" [ 1026.080306] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.086338] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1026.086338] env[63538]: value = "task-5101469" [ 1026.086338] env[63538]: _type = "Task" [ 1026.086338] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.093245] env[63538]: DEBUG oslo_vmware.api [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101468, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.099094] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101469, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.228563] env[63538]: DEBUG nova.compute.manager [req-55894b13-38bc-4b92-ac39-3a26958eaa60 req-c47007ff-3699-4137-b6ef-a8a6cd2c291a service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Received event network-vif-plugged-8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1026.228868] env[63538]: DEBUG oslo_concurrency.lockutils [req-55894b13-38bc-4b92-ac39-3a26958eaa60 req-c47007ff-3699-4137-b6ef-a8a6cd2c291a service nova] Acquiring lock "0df15328-aebd-44c5-9c78-ee05f188ad95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.229106] env[63538]: DEBUG oslo_concurrency.lockutils [req-55894b13-38bc-4b92-ac39-3a26958eaa60 req-c47007ff-3699-4137-b6ef-a8a6cd2c291a service nova] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.229232] env[63538]: DEBUG oslo_concurrency.lockutils [req-55894b13-38bc-4b92-ac39-3a26958eaa60 req-c47007ff-3699-4137-b6ef-a8a6cd2c291a service nova] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.229413] env[63538]: DEBUG nova.compute.manager [req-55894b13-38bc-4b92-ac39-3a26958eaa60 req-c47007ff-3699-4137-b6ef-a8a6cd2c291a service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] No waiting events found dispatching network-vif-plugged-8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1026.229589] env[63538]: WARNING nova.compute.manager [req-55894b13-38bc-4b92-ac39-3a26958eaa60 req-c47007ff-3699-4137-b6ef-a8a6cd2c291a service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Received unexpected event network-vif-plugged-8a332a90-393f-41ae-a924-4959c06e6207 for instance with vm_state shelved_offloaded and task_state spawning. 
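The external_instance_event / pop_instance_event entries above follow a prepare-then-wait pattern: a waiter is registered for an expected event such as network-vif-plugged-<port-id> before the operation is triggered, and an incoming notification either releases that waiter or, when nothing was registered (as for the shelved_offloaded instance here), is logged as unexpected. The sketch below is a toy illustration of that pattern only; the class and method names are invented for the example and are not Nova's API.

import threading


class InstanceEventWaiter:
    """Toy version of the prepare/wait/dispatch pattern in the log."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}          # (instance_uuid, event_name) -> Event

    def prepare(self, instance_uuid, event_name):
        # Register interest *before* triggering the external operation.
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        # Called when the external notification arrives.
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # Nobody was waiting: the WARNING case seen above.
            print("WARNING: unexpected event %s for %s"
                  % (event_name, instance_uuid))
            return False
        ev.set()
        return True


if __name__ == "__main__":
    waiter = InstanceEventWaiter()
    ev = waiter.prepare("0df15328", "network-vif-plugged-8a332a90")
    waiter.dispatch("0df15328", "network-vif-plugged-8a332a90")
    print("plugged:", ev.wait(timeout=1.0))
    # No one prepared for this one, so it is reported as unexpected.
    waiter.dispatch("0df15328", "network-vif-deleted-8a332a90")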
[ 1026.376861] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.377068] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.377259] env[63538]: DEBUG nova.network.neutron [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1026.497425] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "7ee64b60-9b88-4710-a477-e984fa36a142" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.497654] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "7ee64b60-9b88-4710-a477-e984fa36a142" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.503506] env[63538]: DEBUG oslo_vmware.api [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101467, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198462} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.503723] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1026.503916] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1026.504132] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1026.504314] env[63538]: INFO nova.compute.manager [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1026.504551] env[63538]: DEBUG oslo.service.loopingcall [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1026.505062] env[63538]: DEBUG nova.compute.manager [-] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1026.505187] env[63538]: DEBUG nova.network.neutron [-] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1026.507420] env[63538]: DEBUG nova.scheduler.client.report [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1026.595520] env[63538]: DEBUG oslo_vmware.api [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101468, 'name': PowerOffVM_Task, 'duration_secs': 0.23987} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.595891] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1026.596091] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1026.596828] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b19d623-0efc-4451-b20b-09b474e4b823 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.603524] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101469, 'name': ReconfigVM_Task, 'duration_secs': 0.331506} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.604224] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 148790a7-0a35-4d26-ae9f-6f954a161c88/148790a7-0a35-4d26-ae9f-6f954a161c88.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1026.604849] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16bd2026-b3ff-4ff9-b9b7-2f0e12bea15e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.612216] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1026.612216] env[63538]: value = "task-5101471" [ 1026.612216] env[63538]: _type = "Task" [ 1026.612216] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.621693] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101471, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.686920] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1026.687225] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1026.687498] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleting the datastore file [datastore1] 90e56075-0d77-467f-90be-913315b63b33 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.687995] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15d4f6ab-c0f9-4af8-bf2a-188bf23a4e45 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.695267] env[63538]: DEBUG oslo_vmware.api [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1026.695267] env[63538]: value = "task-5101472" [ 1026.695267] env[63538]: _type = "Task" [ 1026.695267] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.704607] env[63538]: DEBUG oslo_vmware.api [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101472, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.000274] env[63538]: DEBUG nova.compute.manager [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1027.012577] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.013159] env[63538]: DEBUG nova.compute.manager [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1027.016255] env[63538]: DEBUG oslo_concurrency.lockutils [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.457s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.016431] env[63538]: DEBUG oslo_concurrency.lockutils [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.022019] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.833s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.022019] env[63538]: INFO nova.compute.claims [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1027.065358] env[63538]: INFO nova.scheduler.client.report [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted allocations for instance 4ec5d3a2-8b29-4074-b323-f94704043b8b [ 1027.123071] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101471, 'name': Rename_Task, 'duration_secs': 0.16888} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.123375] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1027.123634] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a32eb8cc-bcf7-4731-94f3-f2fc9b3aa6e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.131897] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1027.131897] env[63538]: value = "task-5101473" [ 1027.131897] env[63538]: _type = "Task" [ 1027.131897] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.144021] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101473, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.209216] env[63538]: DEBUG oslo_vmware.api [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101472, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158022} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.209677] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.210045] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1027.210287] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1027.210581] env[63538]: INFO nova.compute.manager [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 90e56075-0d77-467f-90be-913315b63b33] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1027.211197] env[63538]: DEBUG oslo.service.loopingcall [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1027.211296] env[63538]: DEBUG nova.compute.manager [-] [instance: 90e56075-0d77-467f-90be-913315b63b33] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1027.211451] env[63538]: DEBUG nova.network.neutron [-] [instance: 90e56075-0d77-467f-90be-913315b63b33] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1027.247177] env[63538]: DEBUG nova.network.neutron [-] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.372697] env[63538]: DEBUG nova.network.neutron [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Updating instance_info_cache with network_info: [{"id": "8a332a90-393f-41ae-a924-4959c06e6207", "address": "fa:16:3e:37:0e:9d", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a332a90-39", "ovs_interfaceid": "8a332a90-393f-41ae-a924-4959c06e6207", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.512277] env[63538]: DEBUG nova.compute.manager [req-b8e824de-cda5-4cdd-8745-eafe6df45b8f req-00e6aeab-453e-417a-a509-1e9a6bf02eb6 service nova] [instance: 90e56075-0d77-467f-90be-913315b63b33] Received event network-vif-deleted-674bcf37-4948-4ce1-8f18-5ba7912f2544 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1027.513285] env[63538]: INFO nova.compute.manager [req-b8e824de-cda5-4cdd-8745-eafe6df45b8f req-00e6aeab-453e-417a-a509-1e9a6bf02eb6 service nova] [instance: 90e56075-0d77-467f-90be-913315b63b33] Neutron deleted interface 674bcf37-4948-4ce1-8f18-5ba7912f2544; detaching it from the instance and deleting it from the info cache [ 1027.513285] env[63538]: DEBUG nova.network.neutron [req-b8e824de-cda5-4cdd-8745-eafe6df45b8f req-00e6aeab-453e-417a-a509-1e9a6bf02eb6 service nova] [instance: 90e56075-0d77-467f-90be-913315b63b33] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.529212] env[63538]: DEBUG nova.compute.utils [None 
req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1027.534347] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.535222] env[63538]: DEBUG nova.compute.manager [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1027.535388] env[63538]: DEBUG nova.network.neutron [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1027.574907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-59ad00b1-7ed5-45e4-89fc-6b98d304cd77 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "4ec5d3a2-8b29-4074-b323-f94704043b8b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.981s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.604698] env[63538]: DEBUG nova.policy [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16fdc041f4c74e0ea76ee8984f9786f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a701618902d411b8af203fdbb1069be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1027.643102] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101473, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.749443] env[63538]: INFO nova.compute.manager [-] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Took 1.24 seconds to deallocate network for instance. 
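[editor's note] The surrounding entries (Rename_Task, PowerOnVM_Task, DeleteDatastoreFile_Task with "Waiting for the task ... to complete" and "progress is N%") follow oslo.vmware's task-polling pattern: the driver submits a vSphere task and then blocks in wait_for_task while the session polls it. Below is a minimal sketch of that pattern, not the exact Nova driver code; the vCenter host, credentials and the 'vm-123' managed-object ID are placeholders, not values taken from this log.

# Minimal sketch of the oslo.vmware task-wait pattern seen above.
# Host, credentials and the managed-object ID are illustrative only.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc1.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed-object reference for an existing VM (placeholder ID),
# submit PowerOnVM_Task, then block while the session polls the task;
# the "Task: {...} progress is N%" lines above come from this polling loop.
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task_ref)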
[ 1027.875712] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.907139] env[63538]: DEBUG nova.virt.hardware [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='15c7df8397079f6e53c949e7f5bf160e',container_format='bare',created_at=2025-12-12T12:56:54Z,direct_url=,disk_format='vmdk',id=6185ea79-9c71-4180-85df-f64f05052bed,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-2121014935-shelved',owner='1fe11c1386b14d139f4416cbf20fb201',properties=ImageMetaProps,protected=,size=31665152,status='active',tags=,updated_at=2025-12-12T12:57:16Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1027.907393] env[63538]: DEBUG nova.virt.hardware [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1027.907558] env[63538]: DEBUG nova.virt.hardware [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1027.907744] env[63538]: DEBUG nova.virt.hardware [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1027.907898] env[63538]: DEBUG nova.virt.hardware [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1027.908064] env[63538]: DEBUG nova.virt.hardware [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1027.908287] env[63538]: DEBUG nova.virt.hardware [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1027.908450] env[63538]: DEBUG nova.virt.hardware [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1027.908634] env[63538]: DEBUG nova.virt.hardware [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1027.908808] env[63538]: DEBUG nova.virt.hardware [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1027.909555] env[63538]: DEBUG nova.virt.hardware [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1027.910197] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3ca571-8e03-427a-ba9b-b805d59adabc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.918554] env[63538]: DEBUG nova.network.neutron [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Successfully created port: c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1027.926990] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a4150c-291e-41dd-8b18-c626b86103e4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.951117] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:0e:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a332a90-393f-41ae-a924-4959c06e6207', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1027.956175] env[63538]: DEBUG oslo.service.loopingcall [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1027.956606] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1027.956939] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9af21a58-0e25-4f75-8323-85a16345b54a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.972193] env[63538]: DEBUG nova.network.neutron [-] [instance: 90e56075-0d77-467f-90be-913315b63b33] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.983587] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1027.983587] env[63538]: value = "task-5101474" [ 1027.983587] env[63538]: _type = "Task" [ 1027.983587] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.992834] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101474, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.016535] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-54d036f7-81cb-4bdc-842c-496a4cbd82ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.027039] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea4a24b-17a3-46df-90f9-4e90e4988322 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.039853] env[63538]: DEBUG nova.compute.manager [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1028.073609] env[63538]: DEBUG nova.compute.manager [req-b8e824de-cda5-4cdd-8745-eafe6df45b8f req-00e6aeab-453e-417a-a509-1e9a6bf02eb6 service nova] [instance: 90e56075-0d77-467f-90be-913315b63b33] Detach interface failed, port_id=674bcf37-4948-4ce1-8f18-5ba7912f2544, reason: Instance 90e56075-0d77-467f-90be-913315b63b33 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1028.151235] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101473, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.257377] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.337222] env[63538]: DEBUG nova.compute.manager [req-6b7b82a4-7ca2-445a-b473-5898c63c2056 req-84027a64-a57a-4792-b605-5ffe8c58a8e6 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Received event network-changed-8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1028.338135] env[63538]: DEBUG nova.compute.manager [req-6b7b82a4-7ca2-445a-b473-5898c63c2056 req-84027a64-a57a-4792-b605-5ffe8c58a8e6 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Refreshing instance network info cache due to event network-changed-8a332a90-393f-41ae-a924-4959c06e6207. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1028.338135] env[63538]: DEBUG oslo_concurrency.lockutils [req-6b7b82a4-7ca2-445a-b473-5898c63c2056 req-84027a64-a57a-4792-b605-5ffe8c58a8e6 service nova] Acquiring lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.338135] env[63538]: DEBUG oslo_concurrency.lockutils [req-6b7b82a4-7ca2-445a-b473-5898c63c2056 req-84027a64-a57a-4792-b605-5ffe8c58a8e6 service nova] Acquired lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.338342] env[63538]: DEBUG nova.network.neutron [req-6b7b82a4-7ca2-445a-b473-5898c63c2056 req-84027a64-a57a-4792-b605-5ffe8c58a8e6 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Refreshing network info cache for port 8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1028.366613] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ba9f2c-43e7-4796-9b97-e0b2778ee3d8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.375315] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3f227f-cf50-4dbb-9f08-175beb98eff2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.406634] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014a0e3c-67ab-4098-a0ad-7d80714b0d64 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.415505] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd452124-6f42-4f35-b048-fe5efbf5a61d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.429381] env[63538]: DEBUG nova.compute.provider_tree [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] 
Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.478018] env[63538]: INFO nova.compute.manager [-] [instance: 90e56075-0d77-467f-90be-913315b63b33] Took 1.27 seconds to deallocate network for instance. [ 1028.496465] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101474, 'name': CreateVM_Task, 'duration_secs': 0.37213} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.496655] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1028.497385] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6185ea79-9c71-4180-85df-f64f05052bed" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.497561] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6185ea79-9c71-4180-85df-f64f05052bed" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.497962] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6185ea79-9c71-4180-85df-f64f05052bed" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1028.498240] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-063b10a1-c058-49f7-8f39-e98a9a0119b7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.503258] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1028.503258] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a0016f-ff92-ab1c-c549-e56beb456592" [ 1028.503258] env[63538]: _type = "Task" [ 1028.503258] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.511958] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a0016f-ff92-ab1c-c549-e56beb456592, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.643735] env[63538]: DEBUG oslo_vmware.api [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101473, 'name': PowerOnVM_Task, 'duration_secs': 1.129756} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.644108] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1028.644345] env[63538]: INFO nova.compute.manager [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Took 6.35 seconds to spawn the instance on the hypervisor. [ 1028.644538] env[63538]: DEBUG nova.compute.manager [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1028.644941] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a4d39ac-3027-40db-b901-8de4cf972205 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "interface-209c5f46-9c63-4f55-bc75-bc2e4da989ac-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.645183] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a4d39ac-3027-40db-b901-8de4cf972205 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-209c5f46-9c63-4f55-bc75-bc2e4da989ac-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.645483] env[63538]: DEBUG nova.objects.instance [None req-1a4d39ac-3027-40db-b901-8de4cf972205 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'flavor' on Instance uuid 209c5f46-9c63-4f55-bc75-bc2e4da989ac {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1028.647371] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d076daf9-706e-41e0-8d37-04e77584ea1f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.932050] env[63538]: DEBUG nova.scheduler.client.report [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1028.984622] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.016616] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6185ea79-9c71-4180-85df-f64f05052bed" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.016995] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Processing image 6185ea79-9c71-4180-85df-f64f05052bed {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1029.017318] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6185ea79-9c71-4180-85df-f64f05052bed/6185ea79-9c71-4180-85df-f64f05052bed.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.017528] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6185ea79-9c71-4180-85df-f64f05052bed/6185ea79-9c71-4180-85df-f64f05052bed.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.017770] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1029.018390] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5635b3f8-084a-499c-80e1-6e5624c080bd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.029616] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1029.029712] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 
tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1029.030622] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e5b0d3a-1e2d-4709-841b-1338477baff0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.038624] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1029.038624] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5239ad40-b9bc-a2b8-a653-2639220fb970" [ 1029.038624] env[63538]: _type = "Task" [ 1029.038624] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.048176] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5239ad40-b9bc-a2b8-a653-2639220fb970, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.052548] env[63538]: DEBUG nova.compute.manager [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1029.080820] env[63538]: DEBUG nova.virt.hardware [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1029.081343] env[63538]: DEBUG nova.virt.hardware [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1029.081343] env[63538]: DEBUG nova.virt.hardware [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1029.081482] env[63538]: DEBUG nova.virt.hardware [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1029.081923] env[63538]: DEBUG nova.virt.hardware [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1029.081923] env[63538]: DEBUG nova.virt.hardware [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1029.082753] env[63538]: DEBUG nova.virt.hardware [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1029.082753] env[63538]: DEBUG nova.virt.hardware [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1029.082753] env[63538]: DEBUG nova.virt.hardware [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1029.082753] env[63538]: DEBUG nova.virt.hardware [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1029.082874] env[63538]: DEBUG nova.virt.hardware [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1029.083679] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a43532-e797-4a45-be92-d3abd28ab168 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.091951] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b222d829-dcc4-4942-a14b-091371028638 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.107572] env[63538]: DEBUG nova.network.neutron [req-6b7b82a4-7ca2-445a-b473-5898c63c2056 req-84027a64-a57a-4792-b605-5ffe8c58a8e6 service nova] [instance: 
0df15328-aebd-44c5-9c78-ee05f188ad95] Updated VIF entry in instance network info cache for port 8a332a90-393f-41ae-a924-4959c06e6207. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1029.107971] env[63538]: DEBUG nova.network.neutron [req-6b7b82a4-7ca2-445a-b473-5898c63c2056 req-84027a64-a57a-4792-b605-5ffe8c58a8e6 service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Updating instance_info_cache with network_info: [{"id": "8a332a90-393f-41ae-a924-4959c06e6207", "address": "fa:16:3e:37:0e:9d", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a332a90-39", "ovs_interfaceid": "8a332a90-393f-41ae-a924-4959c06e6207", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.151913] env[63538]: DEBUG nova.objects.instance [None req-1a4d39ac-3027-40db-b901-8de4cf972205 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'pci_requests' on Instance uuid 209c5f46-9c63-4f55-bc75-bc2e4da989ac {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1029.164328] env[63538]: INFO nova.compute.manager [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Took 35.02 seconds to build instance. [ 1029.437160] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.418s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.437686] env[63538]: DEBUG nova.compute.manager [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1029.444029] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.958s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.444029] env[63538]: DEBUG nova.objects.instance [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Lazy-loading 'resources' on Instance uuid 4f81dc4e-2092-4a2c-a511-589d47d118b6 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1029.536510] env[63538]: DEBUG nova.compute.manager [req-b421925a-a815-424f-9923-b719275f6a75 req-113f363b-b895-4e66-8dad-d67187c1f918 service nova] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Received event network-vif-plugged-c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1029.536751] env[63538]: DEBUG oslo_concurrency.lockutils [req-b421925a-a815-424f-9923-b719275f6a75 req-113f363b-b895-4e66-8dad-d67187c1f918 service nova] Acquiring lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.536972] env[63538]: DEBUG oslo_concurrency.lockutils [req-b421925a-a815-424f-9923-b719275f6a75 req-113f363b-b895-4e66-8dad-d67187c1f918 service nova] Lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.537164] env[63538]: DEBUG oslo_concurrency.lockutils [req-b421925a-a815-424f-9923-b719275f6a75 req-113f363b-b895-4e66-8dad-d67187c1f918 service nova] Lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.537335] env[63538]: DEBUG nova.compute.manager [req-b421925a-a815-424f-9923-b719275f6a75 req-113f363b-b895-4e66-8dad-d67187c1f918 service nova] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] No waiting events found dispatching network-vif-plugged-c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1029.537506] env[63538]: WARNING nova.compute.manager [req-b421925a-a815-424f-9923-b719275f6a75 req-113f363b-b895-4e66-8dad-d67187c1f918 service nova] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Received unexpected event network-vif-plugged-c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78 for instance with vm_state building and task_state spawning. 
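[editor's note] The 'Lock "compute_resources" acquired by ... :: waited Ns' and '"released" ... :: held Ns' entries around this point are emitted by the inner wrapper in oslo_concurrency/lockutils.py, which wraps resource-tracker and instance-event code in a named lock. A minimal sketch of that locking pattern follows; the decorated function and its body are illustrative, not Nova's implementation.

# Minimal sketch of the oslo.concurrency locking pattern behind the
# 'Lock "compute_resources" acquired/released :: waited/held' entries.
from oslo_concurrency import lockutils

# Nova-style decorator built with a service prefix; the prefix only affects
# external (file-based) locks, in-process locks key on the lock name alone.
synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs with the named lock held; lockutils logs the acquire and release
    # together with how long the caller waited for and held the lock.
    return instance_uuid

claim_resources('fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9')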
[ 1029.549198] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Preparing fetch location {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1029.549824] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Fetch image to [datastore2] OSTACK_IMG_0e3cf2ad-e1c3-46a4-a3d0-a542e4aa9f48/OSTACK_IMG_0e3cf2ad-e1c3-46a4-a3d0-a542e4aa9f48.vmdk {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1029.549824] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Downloading stream optimized image 6185ea79-9c71-4180-85df-f64f05052bed to [datastore2] OSTACK_IMG_0e3cf2ad-e1c3-46a4-a3d0-a542e4aa9f48/OSTACK_IMG_0e3cf2ad-e1c3-46a4-a3d0-a542e4aa9f48.vmdk on the data store datastore2 as vApp {{(pid=63538) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1029.549824] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Downloading image file data 6185ea79-9c71-4180-85df-f64f05052bed to the ESX as VM named 'OSTACK_IMG_0e3cf2ad-e1c3-46a4-a3d0-a542e4aa9f48' {{(pid=63538) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1029.593413] env[63538]: DEBUG nova.network.neutron [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Successfully updated port: c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1029.611020] env[63538]: DEBUG oslo_concurrency.lockutils [req-6b7b82a4-7ca2-445a-b473-5898c63c2056 req-84027a64-a57a-4792-b605-5ffe8c58a8e6 service nova] Releasing lock "refresh_cache-0df15328-aebd-44c5-9c78-ee05f188ad95" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.611292] env[63538]: DEBUG nova.compute.manager [req-6b7b82a4-7ca2-445a-b473-5898c63c2056 req-84027a64-a57a-4792-b605-5ffe8c58a8e6 service nova] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Received event network-vif-deleted-bcfdefa5-ccee-41b2-8eeb-88554605c6e3 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1029.625513] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1029.625513] env[63538]: value = "resgroup-9" [ 1029.625513] env[63538]: _type = "ResourcePool" [ 1029.625513] env[63538]: }. 
{{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1029.625835] env[63538]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-6cb97de8-438e-4b82-9a48-88327baa58bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.647473] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lease: (returnval){ [ 1029.647473] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5205fba1-cccd-8fc1-eb0b-8588774aaff6" [ 1029.647473] env[63538]: _type = "HttpNfcLease" [ 1029.647473] env[63538]: } obtained for vApp import into resource pool (val){ [ 1029.647473] env[63538]: value = "resgroup-9" [ 1029.647473] env[63538]: _type = "ResourcePool" [ 1029.647473] env[63538]: }. {{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1029.647893] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the lease: (returnval){ [ 1029.647893] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5205fba1-cccd-8fc1-eb0b-8588774aaff6" [ 1029.647893] env[63538]: _type = "HttpNfcLease" [ 1029.647893] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1029.655374] env[63538]: DEBUG nova.objects.base [None req-1a4d39ac-3027-40db-b901-8de4cf972205 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Object Instance<209c5f46-9c63-4f55-bc75-bc2e4da989ac> lazy-loaded attributes: flavor,pci_requests {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1029.655596] env[63538]: DEBUG nova.network.neutron [None req-1a4d39ac-3027-40db-b901-8de4cf972205 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1029.657537] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1029.657537] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5205fba1-cccd-8fc1-eb0b-8588774aaff6" [ 1029.657537] env[63538]: _type = "HttpNfcLease" [ 1029.657537] env[63538]: } is initializing. 
{{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1029.668465] env[63538]: DEBUG oslo_concurrency.lockutils [None req-716bf7c2-b8ca-4267-b37e-5458f9fe6059 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Lock "148790a7-0a35-4d26-ae9f-6f954a161c88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.527s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.764288] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a4d39ac-3027-40db-b901-8de4cf972205 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-209c5f46-9c63-4f55-bc75-bc2e4da989ac-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.119s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.942750] env[63538]: DEBUG nova.compute.utils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1029.944930] env[63538]: DEBUG nova.compute.manager [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1029.945228] env[63538]: DEBUG nova.network.neutron [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1029.994529] env[63538]: DEBUG nova.policy [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '884364daa8f746fb9b32a8b33c9e2cfd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea05f3fb4676466bb2a286f5a2fefb8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1030.098146] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.098326] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.098462] env[63538]: DEBUG nova.network.neutron [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1030.159604] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1030.159604] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5205fba1-cccd-8fc1-eb0b-8588774aaff6" [ 1030.159604] env[63538]: _type = "HttpNfcLease" [ 1030.159604] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1030.159891] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1030.159891] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5205fba1-cccd-8fc1-eb0b-8588774aaff6" [ 1030.159891] env[63538]: _type = "HttpNfcLease" [ 1030.159891] env[63538]: }. {{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1030.160680] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a7073a-6f91-43f9-84a2-05ef0c6ed89e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.169017] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52172853-ed12-715c-5d80-6b5150e0e3a5/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1030.169232] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating HTTP connection to write to file with size = 31665152 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52172853-ed12-715c-5d80-6b5150e0e3a5/disk-0.vmdk. 
{{(pid=63538) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1030.173139] env[63538]: INFO nova.compute.manager [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Rebuilding instance [ 1030.247946] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1fa4ea23-38d6-4c31-ac27-6bb0df85a23c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.275182] env[63538]: DEBUG nova.compute.manager [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1030.276623] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751df83d-33e7-46d1-81f1-a4b1d5b19822 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.304683] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e088fce0-85e2-479c-a533-a460c1329621 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.316559] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a898b472-c9b5-4993-beee-44e71a993856 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.349218] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1426aab9-e015-42f8-a14a-a517da06cda9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.359391] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a69705-4595-48ae-a31e-c07836cb0056 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.375188] env[63538]: DEBUG nova.compute.provider_tree [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.406597] env[63538]: DEBUG nova.network.neutron [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Successfully created port: f05ac716-e765-4cba-958a-3ab686f9dbf9 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1030.450200] env[63538]: DEBUG nova.compute.manager [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1030.635335] env[63538]: DEBUG nova.network.neutron [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1030.792207] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1030.792603] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70bb6ebf-a9d0-4c0b-95ef-adc684dbd845 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.797566] env[63538]: DEBUG nova.network.neutron [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating instance_info_cache with network_info: [{"id": "c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78", "address": "fa:16:3e:85:66:7a", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5eca2fb-a7", "ovs_interfaceid": "c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.802670] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1030.802670] env[63538]: value = "task-5101476" [ 1030.802670] env[63538]: _type = "Task" [ 1030.802670] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.814311] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101476, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.878572] env[63538]: DEBUG nova.scheduler.client.report [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1031.300501] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.300850] env[63538]: DEBUG nova.compute.manager [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Instance network_info: |[{"id": "c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78", "address": "fa:16:3e:85:66:7a", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5eca2fb-a7", "ovs_interfaceid": "c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1031.301467] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:66:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1031.309675] env[63538]: DEBUG oslo.service.loopingcall [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e 
tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1031.311934] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1031.318427] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ede9b074-2272-40e0-9fa0-c83232b6daf2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.344284] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101476, 'name': PowerOffVM_Task, 'duration_secs': 0.426117} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.347062] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1031.347062] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1031.347234] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1031.347234] env[63538]: value = "task-5101477" [ 1031.347234] env[63538]: _type = "Task" [ 1031.347234] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.348230] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771e6677-1b73-47b5-886e-88fec60d9491 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.359524] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1031.364158] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-deebabfd-128a-4cbf-b2e2-deed8fd2d5d4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.368749] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101477, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.385911] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.944s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.389409] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.860s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.389754] env[63538]: DEBUG nova.objects.instance [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lazy-loading 'resources' on Instance uuid edcc5700-7b1e-494a-82d1-844373a9d5a6 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.393232] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1031.393388] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1031.394046] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Deleting the datastore file [datastore2] 148790a7-0a35-4d26-ae9f-6f954a161c88 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1031.394155] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4d52c06-1efa-4c5d-ae1e-3d156899c49b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.402017] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1031.402017] env[63538]: value = "task-5101479" [ 1031.402017] env[63538]: _type = "Task" [ 1031.402017] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.414777] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101479, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.416305] env[63538]: INFO nova.scheduler.client.report [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Deleted allocations for instance 4f81dc4e-2092-4a2c-a511-589d47d118b6 [ 1031.462414] env[63538]: DEBUG nova.compute.manager [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1031.493594] env[63538]: DEBUG nova.virt.hardware [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1031.493978] env[63538]: DEBUG nova.virt.hardware [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1031.494243] env[63538]: DEBUG nova.virt.hardware [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1031.494590] env[63538]: DEBUG nova.virt.hardware [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1031.494922] env[63538]: DEBUG nova.virt.hardware [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1031.495133] env[63538]: DEBUG nova.virt.hardware [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1031.495474] env[63538]: DEBUG nova.virt.hardware [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1031.495770] env[63538]: DEBUG nova.virt.hardware [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1031.495996] env[63538]: DEBUG nova.virt.hardware [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1031.496277] env[63538]: DEBUG nova.virt.hardware [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1031.496543] env[63538]: DEBUG nova.virt.hardware [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1031.497922] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940cd38f-b901-413b-bfd3-13ff19e3c2a8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.511249] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a587eaf8-e029-430d-b453-f307c0c51d29 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.518555] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Completed reading data from the image iterator. {{(pid=63538) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1031.518979] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52172853-ed12-715c-5d80-6b5150e0e3a5/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1031.520345] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2401c534-0b51-463c-b444-c8a635531a90 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.528670] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52172853-ed12-715c-5d80-6b5150e0e3a5/disk-0.vmdk is in state: ready. 
{{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1031.528911] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52172853-ed12-715c-5d80-6b5150e0e3a5/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1031.538158] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-c3f9d3f7-3659-4b05-bc63-21b1173b107c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.747040] env[63538]: DEBUG oslo_vmware.rw_handles [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52172853-ed12-715c-5d80-6b5150e0e3a5/disk-0.vmdk. {{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1031.747105] env[63538]: INFO nova.virt.vmwareapi.images [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Downloaded image file data 6185ea79-9c71-4180-85df-f64f05052bed [ 1031.747941] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91379c5b-fecc-47c0-a709-cc01bf5c3702 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.766387] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c50f9c14-3184-4518-904b-2fe3fea13103 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.771396] env[63538]: DEBUG nova.compute.manager [req-b0e03b51-c342-4a69-b448-8e8c3a313c69 req-b14c8cb5-58b9-4c2e-811f-2598512d9d43 service nova] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Received event network-changed-c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1031.771601] env[63538]: DEBUG nova.compute.manager [req-b0e03b51-c342-4a69-b448-8e8c3a313c69 req-b14c8cb5-58b9-4c2e-811f-2598512d9d43 service nova] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Refreshing instance network info cache due to event network-changed-c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1031.771860] env[63538]: DEBUG oslo_concurrency.lockutils [req-b0e03b51-c342-4a69-b448-8e8c3a313c69 req-b14c8cb5-58b9-4c2e-811f-2598512d9d43 service nova] Acquiring lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.772038] env[63538]: DEBUG oslo_concurrency.lockutils [req-b0e03b51-c342-4a69-b448-8e8c3a313c69 req-b14c8cb5-58b9-4c2e-811f-2598512d9d43 service nova] Acquired lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.772224] env[63538]: DEBUG nova.network.neutron [req-b0e03b51-c342-4a69-b448-8e8c3a313c69 req-b14c8cb5-58b9-4c2e-811f-2598512d9d43 service nova] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Refreshing network info cache for port c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1031.791352] env[63538]: INFO nova.virt.vmwareapi.images [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] The imported VM was unregistered [ 1031.793998] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Caching image {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1031.794350] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating directory with path [datastore2] devstack-image-cache_base/6185ea79-9c71-4180-85df-f64f05052bed {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1031.794590] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e446606-aa97-448d-aa2d-273a0b1bc090 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.809140] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Created directory with path [datastore2] devstack-image-cache_base/6185ea79-9c71-4180-85df-f64f05052bed {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1031.809453] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_0e3cf2ad-e1c3-46a4-a3d0-a542e4aa9f48/OSTACK_IMG_0e3cf2ad-e1c3-46a4-a3d0-a542e4aa9f48.vmdk to [datastore2] devstack-image-cache_base/6185ea79-9c71-4180-85df-f64f05052bed/6185ea79-9c71-4180-85df-f64f05052bed.vmdk. 
{{(pid=63538) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1031.809734] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-b016ebe6-c849-4101-a52a-a6a5a33387f4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.817781] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1031.817781] env[63538]: value = "task-5101481" [ 1031.817781] env[63538]: _type = "Task" [ 1031.817781] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.826391] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101481, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.861026] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101477, 'name': CreateVM_Task, 'duration_secs': 0.392721} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.861209] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1031.861971] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.862214] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.862519] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1031.862771] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cf1de61-7798-4c9e-bdd1-a0428bb9dceb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.867763] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1031.867763] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aad108-64b3-77c9-317f-5f68b91acb44" [ 
1031.867763] env[63538]: _type = "Task" [ 1031.867763] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.871583] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "8ed0bd15-71fc-435e-9e4a-90b023ad8a79" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.871894] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "8ed0bd15-71fc-435e-9e4a-90b023ad8a79" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.872120] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "8ed0bd15-71fc-435e-9e4a-90b023ad8a79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.872335] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "8ed0bd15-71fc-435e-9e4a-90b023ad8a79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.872553] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "8ed0bd15-71fc-435e-9e4a-90b023ad8a79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.876770] env[63538]: INFO nova.compute.manager [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Terminating instance [ 1031.882329] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aad108-64b3-77c9-317f-5f68b91acb44, 'name': SearchDatastore_Task, 'duration_secs': 0.009907} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.883052] env[63538]: DEBUG nova.compute.manager [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1031.883284] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1031.883665] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.883920] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1031.884197] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.884383] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.884611] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1031.885440] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059bc034-8563-4e9e-b497-c92cbbc3d8ff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.888431] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d00c8e53-494b-4c3f-8d60-1e90f0c3469f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.897139] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1031.898195] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea12ecd6-8cd7-4043-b998-5f99749a65ff 
{{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.900423] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1031.900636] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1031.901392] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44c4473a-f5ec-4c6c-9bdf-3df281396183 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.909317] env[63538]: DEBUG oslo_vmware.api [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1031.909317] env[63538]: value = "task-5101482" [ 1031.909317] env[63538]: _type = "Task" [ 1031.909317] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.914266] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1031.914266] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fba7b4-de24-72e8-fbb9-42368539842a" [ 1031.914266] env[63538]: _type = "Task" [ 1031.914266] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.922377] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "interface-209c5f46-9c63-4f55-bc75-bc2e4da989ac-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.922672] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-209c5f46-9c63-4f55-bc75-bc2e4da989ac-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.923058] env[63538]: DEBUG nova.objects.instance [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'flavor' on Instance uuid 209c5f46-9c63-4f55-bc75-bc2e4da989ac {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.930886] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145505} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.934381] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0cb605c-71ca-41a0-90ac-bda3bd7a4c42 tempest-ImagesNegativeTestJSON-966445768 tempest-ImagesNegativeTestJSON-966445768-project-member] Lock "4f81dc4e-2092-4a2c-a511-589d47d118b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.995s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.936840] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1031.937090] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1031.937288] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1031.946346] env[63538]: DEBUG oslo_vmware.api [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101482, 
'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.953157] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fba7b4-de24-72e8-fbb9-42368539842a, 'name': SearchDatastore_Task, 'duration_secs': 0.012455} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.954876] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dddbd7c-925c-4337-a9c9-801225a1670a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.965035] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1031.965035] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5268e1af-d6a4-43ca-e17e-3d6d3e40b6f9" [ 1031.965035] env[63538]: _type = "Task" [ 1031.965035] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.974900] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5268e1af-d6a4-43ca-e17e-3d6d3e40b6f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.191341] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea7867f-484c-4704-a35b-4866d3f93e22 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.200567] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be01d0e-902a-4d95-918f-bfed92b56002 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.235656] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d114379-611f-4ab1-b085-06333c337409 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.245243] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a46bee-2690-4bfa-8949-405e3772ec5a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.261480] env[63538]: DEBUG nova.compute.provider_tree [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.331329] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 
tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101481, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.351443] env[63538]: DEBUG nova.network.neutron [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Successfully updated port: f05ac716-e765-4cba-958a-3ab686f9dbf9 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1032.427087] env[63538]: DEBUG oslo_vmware.api [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101482, 'name': PowerOffVM_Task, 'duration_secs': 0.486743} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.430693] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1032.431348] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1032.431448] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bba44b81-e536-4e12-a453-d914df3274a0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.479273] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5268e1af-d6a4-43ca-e17e-3d6d3e40b6f9, 'name': SearchDatastore_Task, 'duration_secs': 0.018114} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.479563] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.480113] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9/fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1032.481976] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-921b5146-42e3-42dd-b839-982d8c13a4dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.488563] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1032.488563] env[63538]: value = "task-5101484" [ 1032.488563] env[63538]: _type = "Task" [ 1032.488563] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.499853] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101484, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.548724] env[63538]: DEBUG nova.network.neutron [req-b0e03b51-c342-4a69-b448-8e8c3a313c69 req-b14c8cb5-58b9-4c2e-811f-2598512d9d43 service nova] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updated VIF entry in instance network info cache for port c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1032.549234] env[63538]: DEBUG nova.network.neutron [req-b0e03b51-c342-4a69-b448-8e8c3a313c69 req-b14c8cb5-58b9-4c2e-811f-2598512d9d43 service nova] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating instance_info_cache with network_info: [{"id": "c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78", "address": "fa:16:3e:85:66:7a", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5eca2fb-a7", "ovs_interfaceid": "c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.591381] env[63538]: DEBUG nova.objects.instance [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'pci_requests' on Instance uuid 209c5f46-9c63-4f55-bc75-bc2e4da989ac {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.764483] env[63538]: DEBUG nova.scheduler.client.report [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1032.832414] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101481, 'name': MoveVirtualDisk_Task} progress is 38%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.855176] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "refresh_cache-7752c64f-693f-4cf3-951c-7ee0657f1682" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.855414] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "refresh_cache-7752c64f-693f-4cf3-951c-7ee0657f1682" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.855589] env[63538]: DEBUG nova.network.neutron [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1032.987899] env[63538]: DEBUG nova.virt.hardware [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1032.988183] env[63538]: DEBUG nova.virt.hardware [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1032.988373] env[63538]: DEBUG nova.virt.hardware [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.988563] env[63538]: DEBUG nova.virt.hardware [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1032.988713] env[63538]: DEBUG nova.virt.hardware [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.988939] env[63538]: DEBUG nova.virt.hardware [None 
req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1032.989185] env[63538]: DEBUG nova.virt.hardware [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1032.989378] env[63538]: DEBUG nova.virt.hardware [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1032.989522] env[63538]: DEBUG nova.virt.hardware [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1032.989719] env[63538]: DEBUG nova.virt.hardware [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1032.989901] env[63538]: DEBUG nova.virt.hardware [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1032.990878] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d38093a-df10-421c-96ea-a0626f6abea2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.004096] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3250674-75d4-40fb-a5d3-20dbda167868 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.013346] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101484, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.015521] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1033.015852] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1033.016084] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleting the datastore file [datastore2] 8ed0bd15-71fc-435e-9e4a-90b023ad8a79 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1033.016395] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3c427fb-4aca-4c1d-9eab-fda47d62bf03 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.028414] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.035042] env[63538]: DEBUG oslo.service.loopingcall [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1033.036062] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1033.036449] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ef1da48-5122-44d3-acc4-bacbe82da668 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.051439] env[63538]: DEBUG oslo_vmware.api [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1033.051439] env[63538]: value = "task-5101485" [ 1033.051439] env[63538]: _type = "Task" [ 1033.051439] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.053681] env[63538]: DEBUG oslo_concurrency.lockutils [req-b0e03b51-c342-4a69-b448-8e8c3a313c69 req-b14c8cb5-58b9-4c2e-811f-2598512d9d43 service nova] Releasing lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.060542] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.060542] env[63538]: value = "task-5101486" [ 1033.060542] env[63538]: _type = "Task" [ 1033.060542] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.064980] env[63538]: DEBUG oslo_vmware.api [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101485, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.074758] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101486, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.093813] env[63538]: DEBUG nova.objects.base [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Object Instance<209c5f46-9c63-4f55-bc75-bc2e4da989ac> lazy-loaded attributes: flavor,pci_requests {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1033.094169] env[63538]: DEBUG nova.network.neutron [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1033.208162] env[63538]: DEBUG nova.policy [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ebe77c0e32ce4a32b290ba5088e107f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7063c42297c24f2baf7271fa25dec927', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1033.270487] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.881s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.274419] env[63538]: DEBUG oslo_concurrency.lockutils [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.976s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.275427] env[63538]: DEBUG nova.objects.instance [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Lazy-loading 'resources' on Instance uuid 144df97e-f47b-4ead-8243-345d98b9f3e6 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1033.299696] env[63538]: INFO nova.scheduler.client.report [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Deleted allocations for instance edcc5700-7b1e-494a-82d1-844373a9d5a6 [ 1033.332064] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101481, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.404853] env[63538]: DEBUG nova.network.neutron [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1033.500798] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101484, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.563425] env[63538]: DEBUG oslo_vmware.api [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101485, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.566066] env[63538]: DEBUG nova.network.neutron [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Updating instance_info_cache with network_info: [{"id": "f05ac716-e765-4cba-958a-3ab686f9dbf9", "address": "fa:16:3e:db:1a:5d", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf05ac716-e7", "ovs_interfaceid": "f05ac716-e765-4cba-958a-3ab686f9dbf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.576907] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101486, 'name': CreateVM_Task, 'duration_secs': 0.356083} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.577830] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1033.577830] env[63538]: DEBUG oslo_concurrency.lockutils [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.577830] env[63538]: DEBUG oslo_concurrency.lockutils [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.578303] env[63538]: DEBUG oslo_concurrency.lockutils [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1033.578484] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ca34782-479a-4900-b51c-f1178be80953 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.585693] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1033.585693] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ec4530-13bd-a202-316f-2b6c7c085ada" [ 1033.585693] env[63538]: _type = "Task" [ 1033.585693] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.594548] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ec4530-13bd-a202-316f-2b6c7c085ada, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.810142] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30f0a6b5-cc9e-47d1-a851-5ffc516d09e3 tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "edcc5700-7b1e-494a-82d1-844373a9d5a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.410s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.837186] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101481, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.890170] env[63538]: DEBUG nova.network.neutron [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Successfully created port: 3d1c322d-13b3-4f3d-a880-d456b548938f {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1034.001677] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101484, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.067307] env[63538]: DEBUG oslo_vmware.api [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101485, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.068522] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "refresh_cache-7752c64f-693f-4cf3-951c-7ee0657f1682" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.068862] env[63538]: DEBUG nova.compute.manager [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Instance network_info: |[{"id": "f05ac716-e765-4cba-958a-3ab686f9dbf9", "address": "fa:16:3e:db:1a:5d", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf05ac716-e7", "ovs_interfaceid": "f05ac716-e765-4cba-958a-3ab686f9dbf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1034.069352] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:1a:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f05ac716-e765-4cba-958a-3ab686f9dbf9', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1034.077753] env[63538]: DEBUG oslo.service.loopingcall [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.081018] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1034.081508] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1541e8c-5f85-410e-84b1-71b3bc9727b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.111986] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ec4530-13bd-a202-316f-2b6c7c085ada, 'name': SearchDatastore_Task, 'duration_secs': 0.087418} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.113830] env[63538]: DEBUG oslo_concurrency.lockutils [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.114154] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.114400] env[63538]: DEBUG oslo_concurrency.lockutils [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.114566] env[63538]: DEBUG oslo_concurrency.lockutils [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.114760] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.115044] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1034.115044] env[63538]: value = "task-5101487" [ 1034.115044] env[63538]: _type = "Task" [ 1034.115044] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.115433] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be8755f7-eb6a-41e4-b568-c9d0ebaa8230 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.122908] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a8c3ac-1db5-4b3d-bb9d-a355156433fa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.132635] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101487, 'name': CreateVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.135497] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b217f55-1390-476d-a42c-5d87a9a7ce8d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.142974] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.143256] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1034.144222] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5ec3d75-915f-412e-9dac-9439f6b9abd3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.175119] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877b8474-85e0-4caf-a76e-ba085c5e6f24 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.179856] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1034.179856] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52186f28-7029-f9d4-e646-29c99fb6b37e" [ 1034.179856] env[63538]: _type = "Task" [ 1034.179856] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.188803] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1c0fd7-68cd-44b1-ba14-e20b2a7476c4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.201616] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52186f28-7029-f9d4-e646-29c99fb6b37e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.214300] env[63538]: DEBUG nova.compute.provider_tree [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.342824] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101481, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.506526] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101484, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.567309] env[63538]: DEBUG nova.compute.manager [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Received event network-vif-plugged-f05ac716-e765-4cba-958a-3ab686f9dbf9 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1034.567845] env[63538]: DEBUG oslo_concurrency.lockutils [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] Acquiring lock "7752c64f-693f-4cf3-951c-7ee0657f1682-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.568388] env[63538]: DEBUG oslo_concurrency.lockutils [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] Lock "7752c64f-693f-4cf3-951c-7ee0657f1682-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.568690] env[63538]: DEBUG oslo_concurrency.lockutils [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] Lock "7752c64f-693f-4cf3-951c-7ee0657f1682-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.568941] env[63538]: DEBUG nova.compute.manager [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] No waiting events found dispatching network-vif-plugged-f05ac716-e765-4cba-958a-3ab686f9dbf9 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1034.570761] env[63538]: WARNING nova.compute.manager [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Received unexpected event network-vif-plugged-f05ac716-e765-4cba-958a-3ab686f9dbf9 for instance with vm_state building and task_state spawning. [ 1034.570761] env[63538]: DEBUG nova.compute.manager [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Received event network-changed-f05ac716-e765-4cba-958a-3ab686f9dbf9 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1034.570761] env[63538]: DEBUG nova.compute.manager [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Refreshing instance network info cache due to event network-changed-f05ac716-e765-4cba-958a-3ab686f9dbf9. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1034.570761] env[63538]: DEBUG oslo_concurrency.lockutils [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] Acquiring lock "refresh_cache-7752c64f-693f-4cf3-951c-7ee0657f1682" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.570761] env[63538]: DEBUG oslo_concurrency.lockutils [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] Acquired lock "refresh_cache-7752c64f-693f-4cf3-951c-7ee0657f1682" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.571273] env[63538]: DEBUG nova.network.neutron [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Refreshing network info cache for port f05ac716-e765-4cba-958a-3ab686f9dbf9 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1034.584556] env[63538]: DEBUG oslo_vmware.api [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101485, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.629137] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101487, 'name': CreateVM_Task, 'duration_secs': 0.41065} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.629329] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1034.630107] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.630354] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.630702] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1034.630991] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3707cdf-60f9-450a-8884-1d55fd4a1e43 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.639480] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 
tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1034.639480] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524c9325-5fce-ef0d-3d4f-632953498078" [ 1034.639480] env[63538]: _type = "Task" [ 1034.639480] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.656359] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524c9325-5fce-ef0d-3d4f-632953498078, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.692290] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52186f28-7029-f9d4-e646-29c99fb6b37e, 'name': SearchDatastore_Task, 'duration_secs': 0.092666} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.693857] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1892f249-2bd0-4e1b-9e58-518159421404 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.702239] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1034.702239] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e18771-0841-a59e-6db9-9aed1d91776d" [ 1034.702239] env[63538]: _type = "Task" [ 1034.702239] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.713274] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e18771-0841-a59e-6db9-9aed1d91776d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.718360] env[63538]: DEBUG nova.scheduler.client.report [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1034.756074] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.756816] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.835107] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101481, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.813732} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.835412] env[63538]: INFO nova.virt.vmwareapi.ds_util [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_0e3cf2ad-e1c3-46a4-a3d0-a542e4aa9f48/OSTACK_IMG_0e3cf2ad-e1c3-46a4-a3d0-a542e4aa9f48.vmdk to [datastore2] devstack-image-cache_base/6185ea79-9c71-4180-85df-f64f05052bed/6185ea79-9c71-4180-85df-f64f05052bed.vmdk. 
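[Editor's note: the MoveVirtualDisk_Task in task-5101481 above is polled from 18% through 100% before being reported as completed successfully. The snippet below is a minimal, illustrative sketch of that poll-until-done pattern written for this note only; it is not oslo.vmware's wait_for_task implementation, and read_task_info is a hypothetical caller-supplied callback.]

import time

POLL_INTERVAL = 0.5  # seconds; arbitrary value chosen for the sketch


def wait_for_task(read_task_info, poll_interval=POLL_INTERVAL):
    """Poll a long-running task until it finishes.

    read_task_info is a hypothetical callable returning a dict such as
    {'state': 'running' | 'success' | 'error', 'progress': int, 'error': str},
    mirroring the progress lines and the final "completed successfully"
    message seen in the log above.
    """
    while True:
        info = read_task_info()
        if info['state'] == 'running':
            # corresponds to the repeated "progress is N%" debug lines
            print(f"progress is {info.get('progress', 0)}%")
        elif info['state'] == 'success':
            # corresponds to "... completed successfully."
            print("completed successfully")
            return info
        else:
            raise RuntimeError(info.get('error', 'task failed'))
        time.sleep(poll_interval)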
[ 1034.835616] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Cleaning up location [datastore2] OSTACK_IMG_0e3cf2ad-e1c3-46a4-a3d0-a542e4aa9f48 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1034.835808] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_0e3cf2ad-e1c3-46a4-a3d0-a542e4aa9f48 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1034.836121] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fcdb1b08-4e43-4840-a977-232a5b2815bf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.845131] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1034.845131] env[63538]: value = "task-5101488" [ 1034.845131] env[63538]: _type = "Task" [ 1034.845131] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.857131] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101488, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.003618] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101484, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.068840] env[63538]: DEBUG oslo_vmware.api [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101485, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.545951} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.068840] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1035.069044] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1035.070022] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1035.070022] env[63538]: INFO nova.compute.manager [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Took 3.19 seconds to destroy the instance on the hypervisor. [ 1035.070022] env[63538]: DEBUG oslo.service.loopingcall [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1035.070022] env[63538]: DEBUG nova.compute.manager [-] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1035.070022] env[63538]: DEBUG nova.network.neutron [-] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1035.151344] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524c9325-5fce-ef0d-3d4f-632953498078, 'name': SearchDatastore_Task, 'duration_secs': 0.021512} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.151771] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.152072] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.152341] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.212935] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e18771-0841-a59e-6db9-9aed1d91776d, 'name': SearchDatastore_Task, 'duration_secs': 0.017135} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.213394] env[63538]: DEBUG oslo_concurrency.lockutils [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.213734] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 148790a7-0a35-4d26-ae9f-6f954a161c88/148790a7-0a35-4d26-ae9f-6f954a161c88.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1035.214288] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.215081] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.215347] env[63538]: DEBUG 
oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3599166-4f49-4a2a-bba2-dea20c4c4b06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.218012] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2426289e-24c3-4630-960d-a93005e98a58 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.224558] env[63538]: DEBUG oslo_concurrency.lockutils [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.950s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.228890] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.694s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.230860] env[63538]: INFO nova.compute.claims [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1035.234676] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1035.234676] env[63538]: value = "task-5101489" [ 1035.234676] env[63538]: _type = "Task" [ 1035.234676] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.239311] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.239311] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1035.244110] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7b1de7e-450c-43c1-ad86-d89b6cbd56b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.252395] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1035.252395] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bfea68-5963-c181-4337-fcae2c1346ea" [ 1035.252395] env[63538]: _type = "Task" [ 1035.252395] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.254584] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101489, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.261024] env[63538]: INFO nova.scheduler.client.report [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Deleted allocations for instance 144df97e-f47b-4ead-8243-345d98b9f3e6 [ 1035.263263] env[63538]: DEBUG nova.compute.manager [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1035.278778] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bfea68-5963-c181-4337-fcae2c1346ea, 'name': SearchDatastore_Task, 'duration_secs': 0.010824} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.282143] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d99cfca-1e2c-46bc-abdf-509b7752026a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.289667] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1035.289667] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aa9774-0a4d-171e-43fb-e953e607d1dd" [ 1035.289667] env[63538]: _type = "Task" [ 1035.289667] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.300075] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aa9774-0a4d-171e-43fb-e953e607d1dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.355931] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101488, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039806} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.360841] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1035.360841] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6185ea79-9c71-4180-85df-f64f05052bed/6185ea79-9c71-4180-85df-f64f05052bed.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.360921] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6185ea79-9c71-4180-85df-f64f05052bed/6185ea79-9c71-4180-85df-f64f05052bed.vmdk to [datastore2] 0df15328-aebd-44c5-9c78-ee05f188ad95/0df15328-aebd-44c5-9c78-ee05f188ad95.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1035.361610] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5ae1aba-8c07-4c4c-8d96-df5917821886 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.366693] env[63538]: DEBUG nova.network.neutron [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Updated VIF entry in instance network info cache for port f05ac716-e765-4cba-958a-3ab686f9dbf9. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1035.367130] env[63538]: DEBUG nova.network.neutron [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Updating instance_info_cache with network_info: [{"id": "f05ac716-e765-4cba-958a-3ab686f9dbf9", "address": "fa:16:3e:db:1a:5d", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf05ac716-e7", "ovs_interfaceid": "f05ac716-e765-4cba-958a-3ab686f9dbf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.374385] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1035.374385] env[63538]: value = "task-5101490" [ 1035.374385] env[63538]: _type = "Task" [ 1035.374385] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.382973] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101490, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.511035] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101484, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.756322] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101489, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.776352] env[63538]: DEBUG oslo_concurrency.lockutils [None req-21fc8f9d-93a6-496a-ab1f-0f7eef6fdaac tempest-VolumesAdminNegativeTest-944677654 tempest-VolumesAdminNegativeTest-944677654-project-member] Lock "144df97e-f47b-4ead-8243-345d98b9f3e6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.371s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.804738] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aa9774-0a4d-171e-43fb-e953e607d1dd, 'name': SearchDatastore_Task, 'duration_secs': 0.011177} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.804738] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.804738] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 7752c64f-693f-4cf3-951c-7ee0657f1682/7752c64f-693f-4cf3-951c-7ee0657f1682.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1035.805485] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fca41f1f-c598-4c70-88e7-cc23bebb8177 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.813626] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.815817] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1035.815817] env[63538]: value = "task-5101491" [ 1035.815817] env[63538]: _type = "Task" [ 1035.815817] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.827489] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101491, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.871056] env[63538]: DEBUG oslo_concurrency.lockutils [req-c76a8602-ad72-4304-b9bf-6f2a07950499 req-2786a069-235c-4a20-8f02-b127231c8a37 service nova] Releasing lock "refresh_cache-7752c64f-693f-4cf3-951c-7ee0657f1682" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.888110] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101490, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.006950] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101484, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.174216} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.007642] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9/fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1036.008026] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1036.009186] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d9274cc-8808-4240-8835-924536774118 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.020483] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1036.020483] env[63538]: value = "task-5101493" [ 1036.020483] env[63538]: _type = "Task" [ 1036.020483] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.024134] env[63538]: DEBUG nova.network.neutron [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Successfully updated port: 3d1c322d-13b3-4f3d-a880-d456b548938f {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1036.034102] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101493, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.255830] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101489, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.333785] env[63538]: DEBUG nova.compute.manager [req-2b4af30e-a87d-4c4a-b074-a9a67c45acf6 req-4f22b603-dd56-493a-92a3-1b641b5ba579 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Received event network-vif-deleted-3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1036.333927] env[63538]: INFO nova.compute.manager [req-2b4af30e-a87d-4c4a-b074-a9a67c45acf6 req-4f22b603-dd56-493a-92a3-1b641b5ba579 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Neutron deleted interface 3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46; detaching it from the instance and deleting it from the info cache [ 1036.334098] env[63538]: DEBUG nova.network.neutron [req-2b4af30e-a87d-4c4a-b074-a9a67c45acf6 req-4f22b603-dd56-493a-92a3-1b641b5ba579 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.342824] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101491, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.389036] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101490, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.528379] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.528826] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.529216] env[63538]: DEBUG nova.network.neutron [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1036.537685] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101493, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.563850] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b5d699-8856-4301-bb0a-b3f453da0bd6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.572133] env[63538]: DEBUG nova.network.neutron [-] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.575318] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b505d8ec-5713-4ca0-af75-42ec7e24d5f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.622586] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728c7067-5ce1-41ca-a559-ac4c610a06f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.633877] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecf867d-95b6-4f57-b7c0-8822e882e647 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.651604] env[63538]: DEBUG nova.compute.provider_tree [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.672988] env[63538]: DEBUG nova.compute.manager [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Received event 
network-vif-plugged-3d1c322d-13b3-4f3d-a880-d456b548938f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1036.673263] env[63538]: DEBUG oslo_concurrency.lockutils [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] Acquiring lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.673488] env[63538]: DEBUG oslo_concurrency.lockutils [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] Lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.673731] env[63538]: DEBUG oslo_concurrency.lockutils [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] Lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.673891] env[63538]: DEBUG nova.compute.manager [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] No waiting events found dispatching network-vif-plugged-3d1c322d-13b3-4f3d-a880-d456b548938f {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1036.674116] env[63538]: WARNING nova.compute.manager [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Received unexpected event network-vif-plugged-3d1c322d-13b3-4f3d-a880-d456b548938f for instance with vm_state active and task_state None. [ 1036.674316] env[63538]: DEBUG nova.compute.manager [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Received event network-changed-3d1c322d-13b3-4f3d-a880-d456b548938f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1036.674481] env[63538]: DEBUG nova.compute.manager [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Refreshing instance network info cache due to event network-changed-3d1c322d-13b3-4f3d-a880-d456b548938f. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1036.674662] env[63538]: DEBUG oslo_concurrency.lockutils [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] Acquiring lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.753969] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101489, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.393531} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.754299] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 148790a7-0a35-4d26-ae9f-6f954a161c88/148790a7-0a35-4d26-ae9f-6f954a161c88.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1036.754526] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1036.758022] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7504163-8a4b-44e3-a76d-ed543138d21f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.763383] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1036.763383] env[63538]: value = "task-5101494" [ 1036.763383] env[63538]: _type = "Task" [ 1036.763383] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.773905] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101494, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.830814] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101491, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.842977] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8531784a-3fb5-4802-a251-09a606a8ce38 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.854020] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5c327e-f955-4f1f-a382-54cd9617c628 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.899799] env[63538]: DEBUG nova.compute.manager [req-2b4af30e-a87d-4c4a-b074-a9a67c45acf6 req-4f22b603-dd56-493a-92a3-1b641b5ba579 service nova] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Detach interface failed, port_id=3c0dc74b-3e28-440d-ac1f-eb78c9b9cc46, reason: Instance 8ed0bd15-71fc-435e-9e4a-90b023ad8a79 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1036.906965] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101490, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.036230] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101493, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.869721} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.036594] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1037.038117] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45466af0-cf23-488c-89cb-93b40343acea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.066863] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9/fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.067313] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d418af98-a752-4787-82b3-1fd66dd3043b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.087583] env[63538]: INFO nova.compute.manager [-] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Took 2.02 seconds to deallocate network for instance. [ 1037.093256] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1037.093256] env[63538]: value = "task-5101495" [ 1037.093256] env[63538]: _type = "Task" [ 1037.093256] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.104094] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101495, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.105335] env[63538]: WARNING nova.network.neutron [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] 44a04a43-a979-4648-89b2-63323df5b0f3 already exists in list: networks containing: ['44a04a43-a979-4648-89b2-63323df5b0f3']. ignoring it [ 1037.155351] env[63538]: DEBUG nova.scheduler.client.report [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1037.282728] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101494, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084822} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.285507] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1037.286886] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918345a2-338f-4bc4-adaf-92f5a12837f6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.317676] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 148790a7-0a35-4d26-ae9f-6f954a161c88/148790a7-0a35-4d26-ae9f-6f954a161c88.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.318861] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5c608a6-215c-4ade-8672-f6fe7012b324 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.347135] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101491, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.351983] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1037.351983] env[63538]: value = "task-5101496" [ 1037.351983] env[63538]: _type = "Task" [ 1037.351983] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.365988] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101496, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.393018] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101490, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.595675] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.613035] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101495, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.665695] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.437s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.666577] env[63538]: DEBUG nova.compute.manager [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1037.672420] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.415s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.672775] env[63538]: DEBUG nova.objects.instance [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lazy-loading 'resources' on Instance uuid 0a0d0372-dede-4df0-bb9e-231e8a5b3742 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.782186] env[63538]: DEBUG nova.network.neutron [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updating instance_info_cache with network_info: [{"id": "8240a40a-4486-4213-ac28-8eee15d652a8", "address": "fa:16:3e:41:82:69", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8240a40a-44", "ovs_interfaceid": "8240a40a-4486-4213-ac28-8eee15d652a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3d1c322d-13b3-4f3d-a880-d456b548938f", "address": "fa:16:3e:10:ab:9e", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d1c322d-13", "ovs_interfaceid": "3d1c322d-13b3-4f3d-a880-d456b548938f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.849238] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101491, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.869458] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101496, 'name': ReconfigVM_Task, 'duration_secs': 0.356342} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.870312] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 148790a7-0a35-4d26-ae9f-6f954a161c88/148790a7-0a35-4d26-ae9f-6f954a161c88.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.873217] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f8f3ae2a-478b-43d1-a057-1c3766f03279 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.886187] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1037.886187] env[63538]: value = "task-5101497" [ 1037.886187] env[63538]: _type = "Task" [ 1037.886187] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.890438] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101490, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.901678] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101497, 'name': Rename_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.108357] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101495, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.178378] env[63538]: DEBUG nova.compute.utils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1038.193722] env[63538]: DEBUG nova.compute.manager [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1038.194651] env[63538]: DEBUG nova.network.neutron [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1038.254695] env[63538]: DEBUG nova.policy [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87c19c9ce3594acd96c1c215ef8ea555', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '422f50dc66ec48b7b262643390072f3d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1038.284048] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.285035] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.285137] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.286156] env[63538]: DEBUG oslo_concurrency.lockutils [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] Acquired lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.286156] env[63538]: DEBUG nova.network.neutron [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Refreshing network info cache for port 3d1c322d-13b3-4f3d-a880-d456b548938f 
{{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1038.287797] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b62b77-5e3b-4f7a-877e-1c22bf9f69b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.316610] env[63538]: DEBUG nova.virt.hardware [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1038.316988] env[63538]: DEBUG nova.virt.hardware [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1038.317094] env[63538]: DEBUG nova.virt.hardware [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1038.317286] env[63538]: DEBUG nova.virt.hardware [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1038.317457] env[63538]: DEBUG nova.virt.hardware [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1038.317680] env[63538]: DEBUG nova.virt.hardware [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1038.317956] env[63538]: DEBUG nova.virt.hardware [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1038.318193] env[63538]: DEBUG nova.virt.hardware [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1038.318395] env[63538]: DEBUG nova.virt.hardware [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1038.318603] env[63538]: DEBUG nova.virt.hardware [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1038.318750] env[63538]: DEBUG nova.virt.hardware [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1038.325931] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Reconfiguring VM to attach interface {{(pid=63538) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 1038.329840] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecd01cf5-feb3-4966-93b3-bee8c50068b2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.353390] env[63538]: DEBUG oslo_vmware.api [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1038.353390] env[63538]: value = "task-5101498" [ 1038.353390] env[63538]: _type = "Task" [ 1038.353390] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.356875] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101491, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.377195] env[63538]: DEBUG oslo_vmware.api [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101498, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.394652] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101490, 'name': CopyVirtualDisk_Task} progress is 97%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.412797] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101497, 'name': Rename_Task, 'duration_secs': 0.157284} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.413303] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1038.413666] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6f00f41-34cb-4035-bd59-5e9a49129489 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.426316] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1038.426316] env[63538]: value = "task-5101499" [ 1038.426316] env[63538]: _type = "Task" [ 1038.426316] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.440195] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101499, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.573469] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d8b4b3-bd7e-4ca4-8e79-c238f0aebf05 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.583675] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89dabb5e-eff9-4a05-9ad6-53198ae6c570 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.625365] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c43db7d-fbdb-4290-a720-cd56d93cf281 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.639395] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942840bf-7567-4184-80a7-d081a438bb5f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.643864] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101495, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.655154] env[63538]: DEBUG nova.compute.provider_tree [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.661755] env[63538]: DEBUG nova.network.neutron [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Successfully created port: 3d218127-a216-4abc-8a2a-b2e004d627cc {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1038.694777] env[63538]: DEBUG nova.compute.manager [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1038.853690] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101491, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.83593} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.854034] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 7752c64f-693f-4cf3-951c-7ee0657f1682/7752c64f-693f-4cf3-951c-7ee0657f1682.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1038.854499] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1038.854793] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9eb094a2-231d-4f8f-b591-51d856dc2e16 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.868202] env[63538]: DEBUG oslo_vmware.api [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101498, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.869559] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1038.869559] env[63538]: value = "task-5101500" [ 1038.869559] env[63538]: _type = "Task" [ 1038.869559] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.877960] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101500, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.892380] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101490, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.230527} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.893509] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6185ea79-9c71-4180-85df-f64f05052bed/6185ea79-9c71-4180-85df-f64f05052bed.vmdk to [datastore2] 0df15328-aebd-44c5-9c78-ee05f188ad95/0df15328-aebd-44c5-9c78-ee05f188ad95.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1038.893768] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d62771-b5d3-4a65-9445-c7668b52df50 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.916641] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 0df15328-aebd-44c5-9c78-ee05f188ad95/0df15328-aebd-44c5-9c78-ee05f188ad95.vmdk or device None with type streamOptimized {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1038.917105] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34c287cc-c2df-437d-8760-f48d0ebdcd7e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.946738] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101499, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.950032] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1038.950032] env[63538]: value = "task-5101501" [ 1038.950032] env[63538]: _type = "Task" [ 1038.950032] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.133131] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101495, 'name': ReconfigVM_Task, 'duration_secs': 1.984937} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.133816] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Reconfigured VM instance instance-0000005e to attach disk [datastore2] fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9/fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.134424] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a26e042-f11b-4b7b-a18f-2bec88541fcd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.140847] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1039.140847] env[63538]: value = "task-5101502" [ 1039.140847] env[63538]: _type = "Task" [ 1039.140847] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.149703] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101502, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.158836] env[63538]: DEBUG nova.scheduler.client.report [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1039.375211] env[63538]: DEBUG oslo_vmware.api [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101498, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.387036] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101500, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07297} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.387036] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1039.387036] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5a898a-310b-43d9-be70-c0146c7696d4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.414432] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 7752c64f-693f-4cf3-951c-7ee0657f1682/7752c64f-693f-4cf3-951c-7ee0657f1682.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1039.416401] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3c5c7c2-f907-40ce-90c8-9272cf80ac7d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.444233] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1039.444233] env[63538]: value = "task-5101503" [ 1039.444233] env[63538]: _type = "Task" [ 1039.444233] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.453162] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101499, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.462079] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101503, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.471370] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101501, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.511636] env[63538]: DEBUG nova.network.neutron [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updated VIF entry in instance network info cache for port 3d1c322d-13b3-4f3d-a880-d456b548938f. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1039.512119] env[63538]: DEBUG nova.network.neutron [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updating instance_info_cache with network_info: [{"id": "8240a40a-4486-4213-ac28-8eee15d652a8", "address": "fa:16:3e:41:82:69", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8240a40a-44", "ovs_interfaceid": "8240a40a-4486-4213-ac28-8eee15d652a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3d1c322d-13b3-4f3d-a880-d456b548938f", "address": "fa:16:3e:10:ab:9e", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d1c322d-13", "ovs_interfaceid": "3d1c322d-13b3-4f3d-a880-d456b548938f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.653696] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101502, 'name': Rename_Task, 'duration_secs': 0.183889} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.654018] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1039.654308] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be5663a9-be17-4e89-822a-48ba25623415 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.663934] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.991s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.666723] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1039.666723] env[63538]: value = "task-5101504" [ 1039.666723] env[63538]: _type = "Task" [ 1039.666723] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.667150] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.683s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.667414] env[63538]: DEBUG nova.objects.instance [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lazy-loading 'resources' on Instance uuid 90e56075-0d77-467f-90be-913315b63b33 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.680001] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101504, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.699233] env[63538]: INFO nova.scheduler.client.report [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleted allocations for instance 0a0d0372-dede-4df0-bb9e-231e8a5b3742 [ 1039.705658] env[63538]: DEBUG nova.compute.manager [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1039.739949] env[63538]: DEBUG nova.virt.hardware [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1039.740279] env[63538]: DEBUG nova.virt.hardware [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1039.740519] env[63538]: DEBUG nova.virt.hardware [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1039.740759] env[63538]: DEBUG nova.virt.hardware [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1039.740959] env[63538]: DEBUG nova.virt.hardware [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1039.741188] env[63538]: DEBUG nova.virt.hardware [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1039.741468] env[63538]: DEBUG nova.virt.hardware [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1039.741692] env[63538]: DEBUG nova.virt.hardware [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1039.741914] env[63538]: DEBUG nova.virt.hardware [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 
tempest-ImagesTestJSON-1517434018-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1039.742171] env[63538]: DEBUG nova.virt.hardware [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1039.742404] env[63538]: DEBUG nova.virt.hardware [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1039.743543] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c5ac68-a152-47a6-8c93-5e2747b6de21 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.753478] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce842fd-6f1d-4184-a392-943727419ca5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.870930] env[63538]: DEBUG oslo_vmware.api [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101498, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.950474] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101499, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.959013] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101503, 'name': ReconfigVM_Task, 'duration_secs': 0.496546} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.959796] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 7752c64f-693f-4cf3-951c-7ee0657f1682/7752c64f-693f-4cf3-951c-7ee0657f1682.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.960503] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c461986-5734-4170-a06a-c581a57d9587 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.965622] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101501, 'name': ReconfigVM_Task, 'duration_secs': 0.949807} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.966469] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 0df15328-aebd-44c5-9c78-ee05f188ad95/0df15328-aebd-44c5-9c78-ee05f188ad95.vmdk or device None with type streamOptimized {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.967990] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_options': None, 'encryption_format': None, 'device_name': '/dev/sda', 'size': 0, 'encryption_secret_uuid': None, 'encrypted': False, 'guest_format': None, 'device_type': 'disk', 'boot_index': 0, 'disk_bus': None, 'image_id': 'faabbca4-e27b-433a-b93d-f059fd73bc92'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992467', 'volume_id': 'dc4092b5-f968-4e95-b286-e9901b2a6c30', 'name': 'volume-dc4092b5-f968-4e95-b286-e9901b2a6c30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '0df15328-aebd-44c5-9c78-ee05f188ad95', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc4092b5-f968-4e95-b286-e9901b2a6c30', 'serial': 'dc4092b5-f968-4e95-b286-e9901b2a6c30'}, 'delete_on_termination': False, 'attachment_id': 'e267fc57-cfe1-4c88-8ad5-689adcbc004d', 'guest_format': None, 'mount_device': '/dev/sdb', 'device_type': None, 'boot_index': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=63538) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1039.968192] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 
0df15328-aebd-44c5-9c78-ee05f188ad95] Volume attach. Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1039.968295] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992467', 'volume_id': 'dc4092b5-f968-4e95-b286-e9901b2a6c30', 'name': 'volume-dc4092b5-f968-4e95-b286-e9901b2a6c30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '0df15328-aebd-44c5-9c78-ee05f188ad95', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc4092b5-f968-4e95-b286-e9901b2a6c30', 'serial': 'dc4092b5-f968-4e95-b286-e9901b2a6c30'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1039.970835] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5924c0-c5c2-428d-99dc-2ae5b152544f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.973773] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1039.973773] env[63538]: value = "task-5101505" [ 1039.973773] env[63538]: _type = "Task" [ 1039.973773] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.992319] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cd0407-57e9-4228-ab1d-18e78ba9d92c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.999514] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101505, 'name': Rename_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.024393] env[63538]: DEBUG oslo_concurrency.lockutils [req-40ea8b8b-709a-42ee-85ef-188bb72d78a0 req-5774a1ed-37f3-410f-bbe0-9de5c4b8ff53 service nova] Releasing lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.037634] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] volume-dc4092b5-f968-4e95-b286-e9901b2a6c30/volume-dc4092b5-f968-4e95-b286-e9901b2a6c30.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1040.037934] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec23391e-92f9-4f61-8a7a-7ad64bda3048 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.060365] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1040.060365] env[63538]: value = "task-5101506" [ 1040.060365] env[63538]: _type = "Task" [ 1040.060365] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.072640] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101506, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.183479] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101504, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.212423] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cd637a06-63df-4fdc-9b75-5431d8541344 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "0a0d0372-dede-4df0-bb9e-231e8a5b3742" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.348s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.369241] env[63538]: DEBUG oslo_vmware.api [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101498, 'name': ReconfigVM_Task, 'duration_secs': 1.696626} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.369862] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.370137] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Reconfigured VM to attach interface {{(pid=63538) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 1040.449154] env[63538]: DEBUG oslo_vmware.api [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101499, 'name': PowerOnVM_Task, 'duration_secs': 1.546608} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.449442] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1040.449678] env[63538]: DEBUG nova.compute.manager [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1040.452073] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44456f8-c026-4fcc-8eaf-8ebd4c3c1a38 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.455656] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6403c550-37df-4417-bb93-e16c6f50e0d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.464939] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241e136f-6e10-4e27-b216-3cdd0d85c477 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.510062] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313e773f-37f8-49bf-90de-715675a6474f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.520403] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101505, 'name': Rename_Task, 'duration_secs': 0.178321} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.523018] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1040.523620] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f0669ba9-4af1-48d9-9cee-a67cfe30613f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.526065] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2296da-2160-45dc-b0a3-4b329fd8cf2c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.541457] env[63538]: DEBUG nova.compute.provider_tree [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.544155] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1040.544155] env[63538]: value = "task-5101507" [ 1040.544155] env[63538]: _type = "Task" [ 1040.544155] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.553663] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101507, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.571509] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.681824] env[63538]: DEBUG oslo_vmware.api [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101504, 'name': PowerOnVM_Task, 'duration_secs': 0.543412} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.682139] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1040.682779] env[63538]: INFO nova.compute.manager [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Took 11.63 seconds to spawn the instance on the hypervisor. [ 1040.682779] env[63538]: DEBUG nova.compute.manager [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1040.683531] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67578da4-e947-49d2-a95c-288118218b01 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.882292] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af3ec12e-4838-4184-a26f-ae7cb35dbc54 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-209c5f46-9c63-4f55-bc75-bc2e4da989ac-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.959s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.957231] env[63538]: DEBUG nova.network.neutron [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Successfully updated port: 3d218127-a216-4abc-8a2a-b2e004d627cc {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1040.986411] env[63538]: DEBUG oslo_concurrency.lockutils [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.005043] env[63538]: DEBUG nova.compute.manager [req-902f7f65-131a-4f58-92d6-594d76a03ab1 req-8718220d-b436-40ca-b16b-6a147c271750 service nova] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Received event network-vif-plugged-3d218127-a216-4abc-8a2a-b2e004d627cc {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1041.005043] env[63538]: DEBUG oslo_concurrency.lockutils [req-902f7f65-131a-4f58-92d6-594d76a03ab1 req-8718220d-b436-40ca-b16b-6a147c271750 service nova] Acquiring lock "7ee64b60-9b88-4710-a477-e984fa36a142-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.005043] env[63538]: DEBUG oslo_concurrency.lockutils [req-902f7f65-131a-4f58-92d6-594d76a03ab1 
req-8718220d-b436-40ca-b16b-6a147c271750 service nova] Lock "7ee64b60-9b88-4710-a477-e984fa36a142-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.005043] env[63538]: DEBUG oslo_concurrency.lockutils [req-902f7f65-131a-4f58-92d6-594d76a03ab1 req-8718220d-b436-40ca-b16b-6a147c271750 service nova] Lock "7ee64b60-9b88-4710-a477-e984fa36a142-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.005043] env[63538]: DEBUG nova.compute.manager [req-902f7f65-131a-4f58-92d6-594d76a03ab1 req-8718220d-b436-40ca-b16b-6a147c271750 service nova] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] No waiting events found dispatching network-vif-plugged-3d218127-a216-4abc-8a2a-b2e004d627cc {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1041.005043] env[63538]: WARNING nova.compute.manager [req-902f7f65-131a-4f58-92d6-594d76a03ab1 req-8718220d-b436-40ca-b16b-6a147c271750 service nova] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Received unexpected event network-vif-plugged-3d218127-a216-4abc-8a2a-b2e004d627cc for instance with vm_state building and task_state spawning. [ 1041.048252] env[63538]: DEBUG nova.scheduler.client.report [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1041.064174] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101507, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.077686] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101506, 'name': ReconfigVM_Task, 'duration_secs': 0.648376} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.080315] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Reconfigured VM instance instance-00000047 to attach disk [datastore1] volume-dc4092b5-f968-4e95-b286-e9901b2a6c30/volume-dc4092b5-f968-4e95-b286-e9901b2a6c30.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.087079] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c5c37ed-c648-4cdc-b816-f72518c92664 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.106549] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1041.106549] env[63538]: value = "task-5101508" [ 1041.106549] env[63538]: _type = "Task" [ 1041.106549] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.117494] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101508, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.141506] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "de68a921-bf67-4794-923d-4e062d8ff802" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.141842] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "de68a921-bf67-4794-923d-4e062d8ff802" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.142168] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "de68a921-bf67-4794-923d-4e062d8ff802-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.143834] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "de68a921-bf67-4794-923d-4e062d8ff802-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.002s {{(pid=63538) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.144053] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "de68a921-bf67-4794-923d-4e062d8ff802-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.148967] env[63538]: INFO nova.compute.manager [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Terminating instance [ 1041.153331] env[63538]: DEBUG nova.compute.manager [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1041.153606] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1041.154472] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c40b811-6f9e-490a-93cf-dc155b20106e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.166557] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1041.166866] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-912f5919-9fa2-4e73-a561-125aa72015cf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.174810] env[63538]: DEBUG oslo_vmware.api [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 1041.174810] env[63538]: value = "task-5101509" [ 1041.174810] env[63538]: _type = "Task" [ 1041.174810] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.185826] env[63538]: DEBUG oslo_vmware.api [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101509, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.204054] env[63538]: INFO nova.compute.manager [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Took 32.05 seconds to build instance. [ 1041.210685] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "cbd40984-29b6-4ed9-8c87-9fd4c80f6f13" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.210685] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "cbd40984-29b6-4ed9-8c87-9fd4c80f6f13" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.461234] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "refresh_cache-7ee64b60-9b88-4710-a477-e984fa36a142" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.461414] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "refresh_cache-7ee64b60-9b88-4710-a477-e984fa36a142" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.461573] env[63538]: DEBUG nova.network.neutron [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1041.560162] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.891s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.561177] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.747s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.563035] env[63538]: INFO nova.compute.claims [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1041.572242] env[63538]: DEBUG oslo_vmware.api [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101507, 'name': PowerOnVM_Task, 'duration_secs': 0.952627} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.572242] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1041.572242] env[63538]: INFO nova.compute.manager [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Took 10.11 seconds to spawn the instance on the hypervisor. [ 1041.572458] env[63538]: DEBUG nova.compute.manager [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1041.573302] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c7a8f6-0dbf-4cc3-8977-a3bc0ed6c67e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.598589] env[63538]: INFO nova.scheduler.client.report [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleted allocations for instance 90e56075-0d77-467f-90be-913315b63b33 [ 1041.618673] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101508, 'name': ReconfigVM_Task, 'duration_secs': 0.250566} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.618673] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992467', 'volume_id': 'dc4092b5-f968-4e95-b286-e9901b2a6c30', 'name': 'volume-dc4092b5-f968-4e95-b286-e9901b2a6c30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '0df15328-aebd-44c5-9c78-ee05f188ad95', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc4092b5-f968-4e95-b286-e9901b2a6c30', 'serial': 'dc4092b5-f968-4e95-b286-e9901b2a6c30'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1041.619238] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b2310e4-e501-4f9d-8576-e0dd85cce1c9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.629269] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1041.629269] env[63538]: value = "task-5101510" [ 1041.629269] env[63538]: _type = "Task" [ 1041.629269] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.647345] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101510, 'name': Rename_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.688849] env[63538]: DEBUG oslo_vmware.api [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101509, 'name': PowerOffVM_Task, 'duration_secs': 0.253587} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.689186] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1041.691922] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1041.691922] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-040563e1-768b-42d3-be59-edd052fecf6d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.706779] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2263d278-81d0-47f2-9e9b-44c15d13bd0e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.563s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.713312] env[63538]: DEBUG nova.compute.manager [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1041.775577] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1041.775759] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1041.775976] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Deleting the datastore file [datastore1] de68a921-bf67-4794-923d-4e062d8ff802 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1041.776297] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9321cba4-34e4-4950-918b-ca1b0744886c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.786349] env[63538]: DEBUG oslo_vmware.api [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 1041.786349] env[63538]: value = "task-5101512" [ 1041.786349] env[63538]: _type = "Task" [ 1041.786349] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.797543] env[63538]: DEBUG oslo_vmware.api [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101512, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.009308] env[63538]: DEBUG nova.network.neutron [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1042.095940] env[63538]: INFO nova.compute.manager [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Took 24.93 seconds to build instance. 
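[editor's note] The power-off / unregister / delete-datastore-file sequence above is driven through oslo.vmware's task interface: each vCenter call such as PowerOffVM_Task returns a Task managed object, and the repeated "Waiting for the task" and "progress is N%." entries are the session polling that task in wait_for_task(). A minimal sketch of that pattern follows, assuming an already-established oslo_vmware.api.VMwareAPISession and an already-resolved VirtualMachine managed-object reference; the function name is illustrative and not taken from Nova.

def power_off_and_wait(session, vm_ref):
    # `session` is assumed to be an oslo_vmware.api.VMwareAPISession and
    # `vm_ref` a VirtualMachine managed-object reference (Nova resolves it
    # from the instance UUID in nova.virt.vmwareapi.vm_util).
    #
    # Start the asynchronous operation on vCenter; this returns a Task
    # reference such as the "task-5101509" value seen in the log above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # Poll the task until it completes, raising if it ends in error; each
    # poll round corresponds to one "progress is N%." DEBUG line.
    return session.wait_for_task(task)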
[ 1042.107699] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7b7768bc-dcc7-4a77-9b8b-1202b79846fa tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "90e56075-0d77-467f-90be-913315b63b33" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.089s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.141695] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101510, 'name': Rename_Task, 'duration_secs': 0.249892} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.144744] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1042.145048] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53df6944-3945-4d1a-ac0b-1ce564435945 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.153930] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1042.153930] env[63538]: value = "task-5101513" [ 1042.153930] env[63538]: _type = "Task" [ 1042.153930] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.165304] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101513, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.255994] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.298360] env[63538]: DEBUG oslo_vmware.api [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101512, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.367851} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.298626] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1042.298873] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1042.299125] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1042.299327] env[63538]: INFO nova.compute.manager [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1042.300126] env[63538]: DEBUG oslo.service.loopingcall [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1042.300490] env[63538]: DEBUG nova.compute.manager [-] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1042.301104] env[63538]: DEBUG nova.network.neutron [-] [instance: de68a921-bf67-4794-923d-4e062d8ff802] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1042.322980] env[63538]: DEBUG nova.network.neutron [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Updating instance_info_cache with network_info: [{"id": "3d218127-a216-4abc-8a2a-b2e004d627cc", "address": "fa:16:3e:af:a1:a4", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d218127-a2", "ovs_interfaceid": "3d218127-a216-4abc-8a2a-b2e004d627cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.364393] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquiring lock "148790a7-0a35-4d26-ae9f-6f954a161c88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.364716] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Lock "148790a7-0a35-4d26-ae9f-6f954a161c88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.365021] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquiring lock "148790a7-0a35-4d26-ae9f-6f954a161c88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.365639] env[63538]: DEBUG oslo_concurrency.lockutils [None 
req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Lock "148790a7-0a35-4d26-ae9f-6f954a161c88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.365792] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Lock "148790a7-0a35-4d26-ae9f-6f954a161c88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.369832] env[63538]: INFO nova.compute.manager [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Terminating instance [ 1042.372949] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquiring lock "refresh_cache-148790a7-0a35-4d26-ae9f-6f954a161c88" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.372949] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquired lock "refresh_cache-148790a7-0a35-4d26-ae9f-6f954a161c88" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.372949] env[63538]: DEBUG nova.network.neutron [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1042.599097] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5a3c41c8-52f0-4f82-8800-6c6cfdbaa0fb tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "7752c64f-693f-4cf3-951c-7ee0657f1682" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.444s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.665144] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101513, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.827754] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "refresh_cache-7ee64b60-9b88-4710-a477-e984fa36a142" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.827754] env[63538]: DEBUG nova.compute.manager [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Instance network_info: |[{"id": "3d218127-a216-4abc-8a2a-b2e004d627cc", "address": "fa:16:3e:af:a1:a4", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d218127-a2", "ovs_interfaceid": "3d218127-a216-4abc-8a2a-b2e004d627cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1042.828091] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:a1:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f972c061-0cd5-4aed-8cfb-42cc4a08835a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d218127-a216-4abc-8a2a-b2e004d627cc', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1042.837612] env[63538]: DEBUG oslo.service.loopingcall [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1042.840580] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1042.841270] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a2ec849-d6f1-4bc6-9581-a11a3dd725de {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.869399] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1042.869399] env[63538]: value = "task-5101514" [ 1042.869399] env[63538]: _type = "Task" [ 1042.869399] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.883656] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101514, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.904343] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c406af6-9802-458b-a99a-715eecd6e37a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.914900] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfe4ab7-0b1d-4596-a01e-60a4cf805bdc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.921968] env[63538]: DEBUG nova.network.neutron [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1042.957945] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3abac3f8-2d44-4ca3-87bf-1993a3ab088b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.967241] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca252309-8075-410d-a8b0-974b9c6f9726 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.983090] env[63538]: DEBUG nova.compute.provider_tree [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.061399] env[63538]: DEBUG nova.network.neutron [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.095690] env[63538]: DEBUG nova.network.neutron [-] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.170656] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101513, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.174859] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "interface-209c5f46-9c63-4f55-bc75-bc2e4da989ac-c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.175547] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-209c5f46-9c63-4f55-bc75-bc2e4da989ac-c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.177730] env[63538]: DEBUG nova.objects.instance [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'flavor' on Instance uuid 209c5f46-9c63-4f55-bc75-bc2e4da989ac {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.367552] env[63538]: DEBUG nova.compute.manager [req-3a90e92f-2d23-445f-b563-fb277bd6cc38 req-e31a898b-ee37-4ef5-be62-4a698fadfaa3 service nova] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Received event network-vif-deleted-ca7d9759-5177-4ea2-a411-3bae4181182d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1043.379208] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101514, 'name': CreateVM_Task, 'duration_secs': 0.39968} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.379208] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1043.380186] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.380571] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.381073] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1043.381498] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37ee8d18-62aa-4e94-b931-e32028ad8426 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.388741] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1043.388741] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525dda3d-3657-f42d-39c4-654fb2a63365" [ 1043.388741] env[63538]: _type = "Task" [ 1043.388741] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.397639] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525dda3d-3657-f42d-39c4-654fb2a63365, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.485492] env[63538]: DEBUG nova.scheduler.client.report [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1043.564710] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Releasing lock "refresh_cache-148790a7-0a35-4d26-ae9f-6f954a161c88" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.564710] env[63538]: DEBUG nova.compute.manager [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1043.566751] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1043.570020] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcd9a34-080e-461b-8225-47baca67cffa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.578988] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1043.578988] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-451ae162-3cdb-45be-9fbc-71532b071c9f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.583679] env[63538]: DEBUG oslo_vmware.api [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1043.583679] env[63538]: value = "task-5101515" [ 1043.583679] env[63538]: _type = "Task" [ 1043.583679] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.592481] env[63538]: DEBUG oslo_vmware.api [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101515, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.599283] env[63538]: INFO nova.compute.manager [-] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Took 1.30 seconds to deallocate network for instance. [ 1043.665923] env[63538]: DEBUG oslo_vmware.api [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101513, 'name': PowerOnVM_Task, 'duration_secs': 1.02141} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.666352] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1043.772506] env[63538]: DEBUG nova.compute.manager [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1043.773605] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5cc888f-2b90-4f94-afd8-9cd815ef86dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.820412] env[63538]: DEBUG nova.compute.manager [req-33282de1-20ad-4ea6-bf55-696851ca28d4 req-8a8d1352-401c-412d-93af-f80f6413a62e service nova] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Received event network-changed-3d218127-a216-4abc-8a2a-b2e004d627cc {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1043.820762] env[63538]: DEBUG nova.compute.manager [req-33282de1-20ad-4ea6-bf55-696851ca28d4 req-8a8d1352-401c-412d-93af-f80f6413a62e service nova] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Refreshing instance network info cache due to event network-changed-3d218127-a216-4abc-8a2a-b2e004d627cc. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1043.821013] env[63538]: DEBUG oslo_concurrency.lockutils [req-33282de1-20ad-4ea6-bf55-696851ca28d4 req-8a8d1352-401c-412d-93af-f80f6413a62e service nova] Acquiring lock "refresh_cache-7ee64b60-9b88-4710-a477-e984fa36a142" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.821190] env[63538]: DEBUG oslo_concurrency.lockutils [req-33282de1-20ad-4ea6-bf55-696851ca28d4 req-8a8d1352-401c-412d-93af-f80f6413a62e service nova] Acquired lock "refresh_cache-7ee64b60-9b88-4710-a477-e984fa36a142" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.821364] env[63538]: DEBUG nova.network.neutron [req-33282de1-20ad-4ea6-bf55-696851ca28d4 req-8a8d1352-401c-412d-93af-f80f6413a62e service nova] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Refreshing network info cache for port 3d218127-a216-4abc-8a2a-b2e004d627cc {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1043.896854] env[63538]: DEBUG nova.objects.instance [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'pci_requests' on Instance uuid 209c5f46-9c63-4f55-bc75-bc2e4da989ac {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.903364] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525dda3d-3657-f42d-39c4-654fb2a63365, 'name': SearchDatastore_Task, 'duration_secs': 0.03786} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.903971] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.904357] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1043.904621] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.904766] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.904962] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1043.908018] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6cf402ac-6284-4c95-bce2-8ce68f5d1285 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.918714] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1043.918873] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1043.919622] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eebbbeda-eb43-4a5a-a3ed-50f31c04536c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.925371] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1043.925371] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5260c609-f1e4-dd49-cf38-09c1c894dc54" [ 1043.925371] env[63538]: _type = "Task" [ 1043.925371] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.934062] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5260c609-f1e4-dd49-cf38-09c1c894dc54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.976376] env[63538]: DEBUG nova.compute.manager [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Stashing vm_state: active {{(pid=63538) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 1043.993039] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.993679] env[63538]: DEBUG nova.compute.manager [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1043.996529] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.401s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.996639] env[63538]: DEBUG nova.objects.instance [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lazy-loading 'resources' on Instance uuid 8ed0bd15-71fc-435e-9e4a-90b023ad8a79 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.095186] env[63538]: DEBUG oslo_vmware.api [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101515, 'name': PowerOffVM_Task, 'duration_secs': 0.252435} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.095607] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1044.095703] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1044.096177] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0426b4cc-2c68-4967-abf4-05281dcdbc59 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.109653] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.124694] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1044.124694] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1044.124920] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b 
tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Deleting the datastore file [datastore2] 148790a7-0a35-4d26-ae9f-6f954a161c88 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.125309] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-803adb5f-14c9-4329-9608-aa555df3ed0d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.134385] env[63538]: DEBUG oslo_vmware.api [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for the task: (returnval){ [ 1044.134385] env[63538]: value = "task-5101517" [ 1044.134385] env[63538]: _type = "Task" [ 1044.134385] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.148691] env[63538]: DEBUG oslo_vmware.api [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101517, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.294346] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ba748d6-e0d1-4220-8636-fda58e703c37 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 50.549s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.405229] env[63538]: DEBUG nova.objects.base [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Object Instance<209c5f46-9c63-4f55-bc75-bc2e4da989ac> lazy-loaded attributes: flavor,pci_requests {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1044.405496] env[63538]: DEBUG nova.network.neutron [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1044.451034] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5260c609-f1e4-dd49-cf38-09c1c894dc54, 'name': SearchDatastore_Task, 'duration_secs': 0.034232} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.451034] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a51e6761-1590-4642-addf-4c897646bcf5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.455574] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1044.455574] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5291f262-4efb-feca-6dfc-43b8289b9cad" [ 1044.455574] env[63538]: _type = "Task" [ 1044.455574] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.464264] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5291f262-4efb-feca-6dfc-43b8289b9cad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.490025] env[63538]: DEBUG nova.policy [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ebe77c0e32ce4a32b290ba5088e107f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7063c42297c24f2baf7271fa25dec927', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1044.499770] env[63538]: DEBUG nova.compute.utils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1044.507353] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.508430] env[63538]: DEBUG nova.compute.manager [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1044.508747] env[63538]: DEBUG nova.network.neutron [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1044.647406] env[63538]: DEBUG oslo_vmware.api [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Task: {'id': task-5101517, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165781} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.649130] env[63538]: DEBUG nova.policy [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f315670d336b49d6a732297656ce515a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df090f9a727d4cf4a0f466e27928bdc6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1044.654944] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1044.654944] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1044.654944] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1044.654944] env[63538]: INFO nova.compute.manager [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1044.654944] env[63538]: DEBUG oslo.service.loopingcall [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.655484] env[63538]: DEBUG nova.compute.manager [-] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1044.655572] env[63538]: DEBUG nova.network.neutron [-] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1044.677971] env[63538]: DEBUG nova.network.neutron [-] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1044.777748] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f45353cd-27fe-4293-b52d-09fa7e8416d5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.789023] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3bd3ce-a1ab-423f-ac15-eb8e8c528707 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.817648] env[63538]: DEBUG nova.network.neutron [req-33282de1-20ad-4ea6-bf55-696851ca28d4 req-8a8d1352-401c-412d-93af-f80f6413a62e service nova] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Updated VIF entry in instance network info cache for port 3d218127-a216-4abc-8a2a-b2e004d627cc. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1044.818013] env[63538]: DEBUG nova.network.neutron [req-33282de1-20ad-4ea6-bf55-696851ca28d4 req-8a8d1352-401c-412d-93af-f80f6413a62e service nova] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Updating instance_info_cache with network_info: [{"id": "3d218127-a216-4abc-8a2a-b2e004d627cc", "address": "fa:16:3e:af:a1:a4", "network": {"id": "4b4aacee-e2db-457b-9900-6c94f101831e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1899782517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "422f50dc66ec48b7b262643390072f3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d218127-a2", "ovs_interfaceid": "3d218127-a216-4abc-8a2a-b2e004d627cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.819914] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e60525c-9dfa-4f23-8a7d-85791f7d87d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.835733] env[63538]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09383f14-8e55-4a87-93cb-0ec12cba4179 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.856371] env[63538]: DEBUG nova.compute.provider_tree [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.967625] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5291f262-4efb-feca-6dfc-43b8289b9cad, 'name': SearchDatastore_Task, 'duration_secs': 0.025901} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.969680] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.969680] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 7ee64b60-9b88-4710-a477-e984fa36a142/7ee64b60-9b88-4710-a477-e984fa36a142.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1044.969680] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5611641-0fc7-4790-8e6e-09109d354479 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.977054] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1044.977054] env[63538]: value = "task-5101518" [ 1044.977054] env[63538]: _type = "Task" [ 1044.977054] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.985951] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101518, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.009351] env[63538]: DEBUG nova.compute.manager [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1045.385306] env[63538]: DEBUG nova.network.neutron [-] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.387377] env[63538]: DEBUG oslo_concurrency.lockutils [req-33282de1-20ad-4ea6-bf55-696851ca28d4 req-8a8d1352-401c-412d-93af-f80f6413a62e service nova] Releasing lock "refresh_cache-7ee64b60-9b88-4710-a477-e984fa36a142" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.387377] env[63538]: DEBUG nova.scheduler.client.report [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1045.489474] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101518, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.503137] env[63538]: DEBUG nova.network.neutron [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Successfully created port: da8361d9-226a-4b83-967f-41c85a0d4920 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1045.892237] env[63538]: INFO nova.compute.manager [-] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Took 1.24 seconds to deallocate network for instance. 
[ 1045.893010] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.897s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.897356] env[63538]: DEBUG oslo_concurrency.lockutils [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.911s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.898291] env[63538]: DEBUG nova.objects.instance [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63538) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1045.924088] env[63538]: INFO nova.scheduler.client.report [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleted allocations for instance 8ed0bd15-71fc-435e-9e4a-90b023ad8a79 [ 1045.988236] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101518, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.731601} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.989390] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 7ee64b60-9b88-4710-a477-e984fa36a142/7ee64b60-9b88-4710-a477-e984fa36a142.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1045.989574] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1045.989844] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9d45788-0757-4ca2-be4d-d63e89eb6fd2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.998134] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1045.998134] env[63538]: value = "task-5101519" [ 1045.998134] env[63538]: _type = "Task" [ 1045.998134] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.010674] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101519, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.020232] env[63538]: DEBUG nova.compute.manager [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1046.093386] env[63538]: DEBUG nova.virt.hardware [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1046.093386] env[63538]: DEBUG nova.virt.hardware [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1046.093747] env[63538]: DEBUG nova.virt.hardware [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1046.094179] env[63538]: DEBUG nova.virt.hardware [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1046.094532] env[63538]: DEBUG nova.virt.hardware [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1046.094856] env[63538]: DEBUG nova.virt.hardware [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1046.095339] 
env[63538]: DEBUG nova.virt.hardware [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1046.095674] env[63538]: DEBUG nova.virt.hardware [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1046.096063] env[63538]: DEBUG nova.virt.hardware [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1046.096437] env[63538]: DEBUG nova.virt.hardware [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1046.096769] env[63538]: DEBUG nova.virt.hardware [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1046.097764] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3641e2-f129-4280-bc34-b78d6049cfe5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.108410] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1fd5ba-7797-42ae-9926-772c28611105 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.375071] env[63538]: DEBUG nova.network.neutron [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Successfully updated port: c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1046.393037] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "7752c64f-693f-4cf3-951c-7ee0657f1682" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.393354] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "7752c64f-693f-4cf3-951c-7ee0657f1682" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.393534] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "7752c64f-693f-4cf3-951c-7ee0657f1682-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.393729] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "7752c64f-693f-4cf3-951c-7ee0657f1682-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.394482] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "7752c64f-693f-4cf3-951c-7ee0657f1682-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.397371] env[63538]: INFO nova.compute.manager [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Terminating instance [ 1046.401253] env[63538]: DEBUG nova.compute.manager [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1046.401526] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1046.406672] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcccf6e4-3239-418f-8444-15d5cd48b829 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.411540] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.418333] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1046.418494] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-236de9f8-9ee5-4019-b0d9-f4b3a935263d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.426970] env[63538]: DEBUG oslo_vmware.api [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1046.426970] env[63538]: value = "task-5101520" [ 1046.426970] env[63538]: _type = "Task" [ 1046.426970] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.434744] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff02fbfb-d1d4-43b3-b5b0-0f5042260055 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "8ed0bd15-71fc-435e-9e4a-90b023ad8a79" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.563s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.443392] env[63538]: DEBUG oslo_vmware.api [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101520, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.510537] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101519, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080737} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.510825] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1046.511714] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b48943-4185-462c-85fc-6bb57ffd5276 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.536061] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 7ee64b60-9b88-4710-a477-e984fa36a142/7ee64b60-9b88-4710-a477-e984fa36a142.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1046.536413] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5d2493f-184b-4d7a-9fbb-c0adbb463393 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.558319] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1046.558319] env[63538]: value = "task-5101521" [ 1046.558319] env[63538]: _type = "Task" [ 1046.558319] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.568524] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101521, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.730860] env[63538]: DEBUG oslo_concurrency.lockutils [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.731138] env[63538]: DEBUG oslo_concurrency.lockutils [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.878288] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.878525] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.878747] env[63538]: DEBUG nova.network.neutron [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1046.912961] env[63538]: DEBUG oslo_concurrency.lockutils [None req-99ff6d4a-000c-4980-b58e-9aa3650ab138 tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.914355] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.659s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.916315] env[63538]: INFO nova.compute.claims [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1046.937602] env[63538]: DEBUG oslo_vmware.api [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101520, 'name': PowerOffVM_Task, 
'duration_secs': 0.461889} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.938217] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1046.938217] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1046.938389] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67c8a2ad-6f0d-42ce-9768-9737c72bc59b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.048207] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1047.048483] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1047.048862] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleting the datastore file [datastore2] 7752c64f-693f-4cf3-951c-7ee0657f1682 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1047.049083] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0122c3c0-659c-4f73-a4f4-7cd59d9e3d96 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.057408] env[63538]: DEBUG oslo_vmware.api [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1047.057408] env[63538]: value = "task-5101523" [ 1047.057408] env[63538]: _type = "Task" [ 1047.057408] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.069941] env[63538]: DEBUG oslo_vmware.api [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101523, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.073242] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101521, 'name': ReconfigVM_Task, 'duration_secs': 0.327916} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.073361] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 7ee64b60-9b88-4710-a477-e984fa36a142/7ee64b60-9b88-4710-a477-e984fa36a142.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1047.073973] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb2cf238-8967-4571-ba9c-f71f86aa1c7e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.083241] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1047.083241] env[63538]: value = "task-5101524" [ 1047.083241] env[63538]: _type = "Task" [ 1047.083241] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.092083] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101524, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.184406] env[63538]: DEBUG nova.network.neutron [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Successfully updated port: da8361d9-226a-4b83-967f-41c85a0d4920 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1047.234507] env[63538]: DEBUG nova.compute.utils [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1047.420941] env[63538]: WARNING nova.network.neutron [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] 44a04a43-a979-4648-89b2-63323df5b0f3 already exists in list: networks containing: ['44a04a43-a979-4648-89b2-63323df5b0f3']. ignoring it [ 1047.421247] env[63538]: WARNING nova.network.neutron [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] 44a04a43-a979-4648-89b2-63323df5b0f3 already exists in list: networks containing: ['44a04a43-a979-4648-89b2-63323df5b0f3']. ignoring it [ 1047.568988] env[63538]: DEBUG oslo_vmware.api [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101523, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.396795} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.569298] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1047.569497] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1047.569685] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1047.569882] env[63538]: INFO nova.compute.manager [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1047.570199] env[63538]: DEBUG oslo.service.loopingcall [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1047.570427] env[63538]: DEBUG nova.compute.manager [-] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1047.570535] env[63538]: DEBUG nova.network.neutron [-] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1047.599138] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101524, 'name': Rename_Task, 'duration_secs': 0.148495} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.599455] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1047.599722] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33957ff8-403a-4f2d-925c-76f088dfd676 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.612654] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1047.612654] env[63538]: value = "task-5101525" [ 1047.612654] env[63538]: _type = "Task" [ 1047.612654] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.622736] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101525, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.686619] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "refresh_cache-5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.686792] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "refresh_cache-5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.686914] env[63538]: DEBUG nova.network.neutron [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1047.737431] env[63538]: DEBUG oslo_concurrency.lockutils [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.127762] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101525, 'name': PowerOnVM_Task} progress is 92%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.185982] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecc9109-66c4-4bee-b6ab-f4b0bc5a458a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.196101] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97277b9-06a0-4dd0-9004-855b85b82add {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.201074] env[63538]: DEBUG nova.network.neutron [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updating instance_info_cache with network_info: [{"id": "8240a40a-4486-4213-ac28-8eee15d652a8", "address": "fa:16:3e:41:82:69", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8240a40a-44", "ovs_interfaceid": "8240a40a-4486-4213-ac28-8eee15d652a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3d1c322d-13b3-4f3d-a880-d456b548938f", "address": "fa:16:3e:10:ab:9e", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d1c322d-13", "ovs_interfaceid": "3d1c322d-13b3-4f3d-a880-d456b548938f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f", "address": "fa:16:3e:85:42:c6", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc53fd8cc-7c", "ovs_interfaceid": "c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.236510] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f1f123-e2cb-41e0-a733-208600a2f7ac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.241725] env[63538]: DEBUG nova.network.neutron [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1048.249383] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6181b3-2aae-4d10-87f3-389f54106541 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.265892] env[63538]: DEBUG nova.compute.provider_tree [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.351931] env[63538]: DEBUG nova.compute.manager [req-91d2a2ec-20ae-498b-a65c-bdab47ec7fca req-8db50cd5-ca9d-4eb4-9d9c-7ff95effd20f service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Received event network-vif-plugged-c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1048.352232] env[63538]: DEBUG oslo_concurrency.lockutils [req-91d2a2ec-20ae-498b-a65c-bdab47ec7fca req-8db50cd5-ca9d-4eb4-9d9c-7ff95effd20f service nova] Acquiring lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.352855] env[63538]: DEBUG oslo_concurrency.lockutils [req-91d2a2ec-20ae-498b-a65c-bdab47ec7fca req-8db50cd5-ca9d-4eb4-9d9c-7ff95effd20f service nova] Lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.353232] env[63538]: DEBUG oslo_concurrency.lockutils [req-91d2a2ec-20ae-498b-a65c-bdab47ec7fca req-8db50cd5-ca9d-4eb4-9d9c-7ff95effd20f service nova] Lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.353893] env[63538]: DEBUG nova.compute.manager [req-91d2a2ec-20ae-498b-a65c-bdab47ec7fca req-8db50cd5-ca9d-4eb4-9d9c-7ff95effd20f service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] No waiting events found dispatching network-vif-plugged-c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1048.353893] env[63538]: WARNING nova.compute.manager [req-91d2a2ec-20ae-498b-a65c-bdab47ec7fca req-8db50cd5-ca9d-4eb4-9d9c-7ff95effd20f service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Received unexpected event network-vif-plugged-c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f for instance with vm_state active and task_state None. [ 1048.360378] env[63538]: DEBUG nova.network.neutron [-] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.427549] env[63538]: DEBUG nova.network.neutron [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Updating instance_info_cache with network_info: [{"id": "da8361d9-226a-4b83-967f-41c85a0d4920", "address": "fa:16:3e:99:9a:a9", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda8361d9-22", "ovs_interfaceid": "da8361d9-226a-4b83-967f-41c85a0d4920", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.629075] env[63538]: DEBUG oslo_vmware.api [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101525, 'name': PowerOnVM_Task, 'duration_secs': 0.567024} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.629372] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1048.629588] env[63538]: INFO nova.compute.manager [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Took 8.92 seconds to spawn the instance on the hypervisor. [ 1048.629776] env[63538]: DEBUG nova.compute.manager [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1048.630691] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f33d328-b4f6-481a-98c7-e749ab0690dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.705031] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.705635] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.705842] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.706731] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6589d24-e56a-478c-9811-decc1f3f7c9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.727173] env[63538]: DEBUG nova.virt.hardware [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1048.727488] env[63538]: DEBUG nova.virt.hardware [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1048.727662] env[63538]: DEBUG nova.virt.hardware [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1048.727855] env[63538]: DEBUG nova.virt.hardware [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1048.728028] env[63538]: DEBUG nova.virt.hardware [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1048.728197] env[63538]: DEBUG nova.virt.hardware [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1048.728427] env[63538]: DEBUG nova.virt.hardware [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1048.728604] env[63538]: DEBUG nova.virt.hardware [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1048.728802] env[63538]: DEBUG nova.virt.hardware [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1048.729000] env[63538]: DEBUG nova.virt.hardware [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1048.729262] env[63538]: DEBUG nova.virt.hardware [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1048.736709] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None 
req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Reconfiguring VM to attach interface {{(pid=63538) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 1048.737501] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce861c1e-dae0-414d-b13c-3b148d4fe50b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.756292] env[63538]: DEBUG oslo_vmware.api [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1048.756292] env[63538]: value = "task-5101526" [ 1048.756292] env[63538]: _type = "Task" [ 1048.756292] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.765115] env[63538]: DEBUG oslo_vmware.api [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101526, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.771863] env[63538]: DEBUG nova.scheduler.client.report [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1048.829817] env[63538]: DEBUG oslo_concurrency.lockutils [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.830137] env[63538]: DEBUG oslo_concurrency.lockutils [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.830392] env[63538]: INFO nova.compute.manager [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Attaching volume d09b8ebf-5a95-4a50-ba17-6eaf05d0b995 to /dev/sdb [ 1048.862888] env[63538]: INFO nova.compute.manager [-] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Took 1.29 seconds to deallocate network for instance. 
[ 1048.879738] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5f88df-7d10-4da5-8bfd-a2a2b77c6cd9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.888190] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955320ea-84bd-4894-9f6d-44bbb0cf0612 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.906133] env[63538]: DEBUG nova.virt.block_device [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Updating existing volume attachment record: 01b51281-d0de-468a-af4f-cd5565eed3b5 {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1048.912023] env[63538]: DEBUG nova.compute.manager [req-26532950-9400-46e5-8e0d-d6e583a5d4bf req-a6600363-07b5-497c-b050-6bb0f7916dc8 service nova] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Received event network-vif-plugged-da8361d9-226a-4b83-967f-41c85a0d4920 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1048.912023] env[63538]: DEBUG oslo_concurrency.lockutils [req-26532950-9400-46e5-8e0d-d6e583a5d4bf req-a6600363-07b5-497c-b050-6bb0f7916dc8 service nova] Acquiring lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.912268] env[63538]: DEBUG oslo_concurrency.lockutils [req-26532950-9400-46e5-8e0d-d6e583a5d4bf req-a6600363-07b5-497c-b050-6bb0f7916dc8 service nova] Lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.912505] env[63538]: DEBUG oslo_concurrency.lockutils [req-26532950-9400-46e5-8e0d-d6e583a5d4bf req-a6600363-07b5-497c-b050-6bb0f7916dc8 service nova] Lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.912627] env[63538]: DEBUG nova.compute.manager [req-26532950-9400-46e5-8e0d-d6e583a5d4bf req-a6600363-07b5-497c-b050-6bb0f7916dc8 service nova] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] No waiting events found dispatching network-vif-plugged-da8361d9-226a-4b83-967f-41c85a0d4920 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1048.912864] env[63538]: WARNING nova.compute.manager [req-26532950-9400-46e5-8e0d-d6e583a5d4bf req-a6600363-07b5-497c-b050-6bb0f7916dc8 service nova] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Received unexpected event network-vif-plugged-da8361d9-226a-4b83-967f-41c85a0d4920 for instance with vm_state building and task_state spawning. 
[ 1048.930729] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "refresh_cache-5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.931098] env[63538]: DEBUG nova.compute.manager [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Instance network_info: |[{"id": "da8361d9-226a-4b83-967f-41c85a0d4920", "address": "fa:16:3e:99:9a:a9", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda8361d9-22", "ovs_interfaceid": "da8361d9-226a-4b83-967f-41c85a0d4920", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1048.931628] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:9a:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da8361d9-226a-4b83-967f-41c85a0d4920', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1048.940216] env[63538]: DEBUG oslo.service.loopingcall [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1048.940808] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1048.941141] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e02c149-5ca8-4d78-b55c-5e0fc9ac432b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.963998] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1048.963998] env[63538]: value = "task-5101527" [ 1048.963998] env[63538]: _type = "Task" [ 1048.963998] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.973429] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101527, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.152044] env[63538]: INFO nova.compute.manager [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Took 21.65 seconds to build instance. [ 1049.273120] env[63538]: DEBUG oslo_vmware.api [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101526, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.277744] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.278701] env[63538]: DEBUG nova.compute.manager [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1049.285006] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.174s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.285006] env[63538]: DEBUG nova.objects.instance [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lazy-loading 'resources' on Instance uuid de68a921-bf67-4794-923d-4e062d8ff802 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.371710] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.475799] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101527, 'name': CreateVM_Task, 'duration_secs': 0.401249} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.476146] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1049.477038] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.477277] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.477812] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1049.478097] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79beb7d9-639e-4c51-9eb4-c4bdf8d6996a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.485115] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1049.485115] env[63538]: value = 
"session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52460f75-e4c3-f31c-0c72-ac8c63faa39d" [ 1049.485115] env[63538]: _type = "Task" [ 1049.485115] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.497324] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52460f75-e4c3-f31c-0c72-ac8c63faa39d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.654327] env[63538]: DEBUG oslo_concurrency.lockutils [None req-591d0945-4288-4bbe-ad02-ffa0a51b6274 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "7ee64b60-9b88-4710-a477-e984fa36a142" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.156s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.769800] env[63538]: DEBUG oslo_vmware.api [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101526, 'name': ReconfigVM_Task, 'duration_secs': 0.862559} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.770475] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.770679] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Reconfigured VM to attach interface {{(pid=63538) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 1049.784507] env[63538]: DEBUG nova.compute.utils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1049.786117] env[63538]: DEBUG nova.compute.manager [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1049.786261] env[63538]: DEBUG nova.network.neutron [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1049.853341] env[63538]: DEBUG nova.policy [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21f98d3c77454d809c0aa1ca5c7dc6d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '492427e54e1048f292dab2abdac71af5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1049.999159] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52460f75-e4c3-f31c-0c72-ac8c63faa39d, 'name': SearchDatastore_Task, 'duration_secs': 0.012559} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.999503] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.999794] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1050.003019] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.003019] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.003019] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 
tempest-ServerActionsTestOtherA-1545658458-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1050.003019] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d57318fa-15dd-4f7e-8261-8725aa4f9859 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.013354] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1050.013562] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1050.014362] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0034474b-4198-423c-b40a-89d8ec220946 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.020671] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1050.020671] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520e4e86-4c17-69b1-8e15-0cae57d86db6" [ 1050.020671] env[63538]: _type = "Task" [ 1050.020671] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.034317] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520e4e86-4c17-69b1-8e15-0cae57d86db6, 'name': SearchDatastore_Task, 'duration_secs': 0.010134} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.034707] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3620609-b746-40aa-81f9-6b784b4055c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.045079] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1050.045079] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52758ed4-098f-6f67-2a2e-e7b3c4a517ca" [ 1050.045079] env[63538]: _type = "Task" [ 1050.045079] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.053968] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52758ed4-098f-6f67-2a2e-e7b3c4a517ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.061936] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bf7ddc-a092-42ee-8e91-dd880f5cd4ae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.070139] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0ddc53-2f3f-456c-98a1-5ddb703c6796 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.101754] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fae9ecb-d83a-4d27-af6f-d13e73b9a27f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.110041] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dadb008-df22-4b7d-9550-b15b8d463695 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.125288] env[63538]: DEBUG nova.compute.provider_tree [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.169224] env[63538]: DEBUG nova.network.neutron [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Successfully created port: c7228e85-42e9-42ec-b707-620c264a0f37 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1050.277393] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f1a95ac6-0412-47dd-9260-606c90b6549f tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-209c5f46-9c63-4f55-bc75-bc2e4da989ac-c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.101s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.290052] env[63538]: DEBUG nova.compute.manager [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1050.556406] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52758ed4-098f-6f67-2a2e-e7b3c4a517ca, 'name': SearchDatastore_Task, 'duration_secs': 0.010881} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.556685] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.556980] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af/5ef5fe70-fed9-4b3d-9d43-f01cf628d9af.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1050.557273] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12966c66-6a29-401b-903e-825f330d18a1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.564959] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1050.564959] env[63538]: value = "task-5101529" [ 1050.564959] env[63538]: _type = "Task" [ 1050.564959] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.573824] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101529, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.628107] env[63538]: DEBUG nova.scheduler.client.report [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1050.851007] env[63538]: DEBUG nova.compute.manager [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1050.852122] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4983c5ef-5815-4a59-a86e-9930a2db657c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.080117] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101529, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.133332] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.850s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.136293] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.629s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.163538] env[63538]: INFO nova.scheduler.client.report [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Deleted allocations for instance de68a921-bf67-4794-923d-4e062d8ff802 [ 1051.283498] env[63538]: DEBUG nova.compute.manager [req-9a4a5da6-5c14-4a31-825d-63070a12ba1d req-46f80612-5651-4483-b3c1-d089ff9b3552 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Received event network-changed-c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1051.283582] env[63538]: DEBUG nova.compute.manager [req-9a4a5da6-5c14-4a31-825d-63070a12ba1d req-46f80612-5651-4483-b3c1-d089ff9b3552 service nova] [instance: 
209c5f46-9c63-4f55-bc75-bc2e4da989ac] Refreshing instance network info cache due to event network-changed-c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1051.283838] env[63538]: DEBUG oslo_concurrency.lockutils [req-9a4a5da6-5c14-4a31-825d-63070a12ba1d req-46f80612-5651-4483-b3c1-d089ff9b3552 service nova] Acquiring lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.284053] env[63538]: DEBUG oslo_concurrency.lockutils [req-9a4a5da6-5c14-4a31-825d-63070a12ba1d req-46f80612-5651-4483-b3c1-d089ff9b3552 service nova] Acquired lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.284266] env[63538]: DEBUG nova.network.neutron [req-9a4a5da6-5c14-4a31-825d-63070a12ba1d req-46f80612-5651-4483-b3c1-d089ff9b3552 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Refreshing network info cache for port c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1051.301376] env[63538]: DEBUG nova.compute.manager [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1051.341802] env[63538]: DEBUG nova.virt.hardware [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1051.341944] env[63538]: DEBUG nova.virt.hardware [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1051.342210] env[63538]: DEBUG nova.virt.hardware [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1051.342396] env[63538]: DEBUG nova.virt.hardware [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor pref 0:0:0 {{(pid=63538) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1051.342553] env[63538]: DEBUG nova.virt.hardware [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1051.342706] env[63538]: DEBUG nova.virt.hardware [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1051.342924] env[63538]: DEBUG nova.virt.hardware [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1051.343143] env[63538]: DEBUG nova.virt.hardware [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1051.343285] env[63538]: DEBUG nova.virt.hardware [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1051.343494] env[63538]: DEBUG nova.virt.hardware [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1051.343753] env[63538]: DEBUG nova.virt.hardware [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1051.344721] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a82e82-5f63-4080-ac91-19215359bfe9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.356867] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f107e008-0fab-4010-8d58-4f9e6f47fb6f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.373718] env[63538]: INFO nova.compute.manager [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] instance snapshotting [ 1051.374518] env[63538]: DEBUG nova.objects.instance [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'flavor' on Instance uuid 
3d80dc17-e330-4575-8e12-e06d8e76274a {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.578671] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101529, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522796} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.579017] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af/5ef5fe70-fed9-4b3d-9d43-f01cf628d9af.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1051.580264] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1051.580264] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b492da1-d02c-4d19-b3f6-710768fcc871 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.586765] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1051.586765] env[63538]: value = "task-5101531" [ 1051.586765] env[63538]: _type = "Task" [ 1051.586765] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.596371] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101531, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.646177] env[63538]: INFO nova.compute.claims [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1051.681546] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9ca75d91-f181-40ed-aafb-8b7c4ff1d19b tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "de68a921-bf67-4794-923d-4e062d8ff802" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.539s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.807789] env[63538]: DEBUG nova.compute.manager [req-9b5ca6a8-2e94-4bf9-b7a1-48ec737ba46f req-cdd86691-e785-4360-851f-244c64c7da6e service nova] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Received event network-changed-da8361d9-226a-4b83-967f-41c85a0d4920 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1051.808012] env[63538]: DEBUG nova.compute.manager [req-9b5ca6a8-2e94-4bf9-b7a1-48ec737ba46f req-cdd86691-e785-4360-851f-244c64c7da6e service nova] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Refreshing instance network info cache due to event network-changed-da8361d9-226a-4b83-967f-41c85a0d4920. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1051.808258] env[63538]: DEBUG oslo_concurrency.lockutils [req-9b5ca6a8-2e94-4bf9-b7a1-48ec737ba46f req-cdd86691-e785-4360-851f-244c64c7da6e service nova] Acquiring lock "refresh_cache-5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.808404] env[63538]: DEBUG oslo_concurrency.lockutils [req-9b5ca6a8-2e94-4bf9-b7a1-48ec737ba46f req-cdd86691-e785-4360-851f-244c64c7da6e service nova] Acquired lock "refresh_cache-5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.808569] env[63538]: DEBUG nova.network.neutron [req-9b5ca6a8-2e94-4bf9-b7a1-48ec737ba46f req-cdd86691-e785-4360-851f-244c64c7da6e service nova] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Refreshing network info cache for port da8361d9-226a-4b83-967f-41c85a0d4920 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1051.882431] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6653e62-0819-4e1d-9e95-ec398ef8352b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.912497] env[63538]: DEBUG nova.network.neutron [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Successfully updated port: c7228e85-42e9-42ec-b707-620c264a0f37 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1051.917168] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71b4214-efcb-4a6d-be4b-1c927382c67a {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.097926] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101531, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.201893} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.098366] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1052.099208] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6267c665-1515-4e36-be12-c970b873d01d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.123974] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af/5ef5fe70-fed9-4b3d-9d43-f01cf628d9af.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1052.124390] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0dc1181c-4456-4981-910e-1cc0c7625f5d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.147163] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1052.147163] env[63538]: value = "task-5101532" [ 1052.147163] env[63538]: _type = "Task" [ 1052.147163] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.157555] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101532, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.161904] env[63538]: INFO nova.compute.resource_tracker [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating resource usage from migration 36b9ab62-693e-4437-b195-831dd146d846 [ 1052.210182] env[63538]: DEBUG nova.compute.manager [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Stashing vm_state: active {{(pid=63538) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 1052.237402] env[63538]: DEBUG nova.network.neutron [req-9a4a5da6-5c14-4a31-825d-63070a12ba1d req-46f80612-5651-4483-b3c1-d089ff9b3552 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updated VIF entry in instance network info cache for port c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1052.238034] env[63538]: DEBUG nova.network.neutron [req-9a4a5da6-5c14-4a31-825d-63070a12ba1d req-46f80612-5651-4483-b3c1-d089ff9b3552 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updating instance_info_cache with network_info: [{"id": "8240a40a-4486-4213-ac28-8eee15d652a8", "address": "fa:16:3e:41:82:69", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8240a40a-44", "ovs_interfaceid": "8240a40a-4486-4213-ac28-8eee15d652a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3d1c322d-13b3-4f3d-a880-d456b548938f", "address": "fa:16:3e:10:ab:9e", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", 
"segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d1c322d-13", "ovs_interfaceid": "3d1c322d-13b3-4f3d-a880-d456b548938f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f", "address": "fa:16:3e:85:42:c6", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc53fd8cc-7c", "ovs_interfaceid": "c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.420671] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "refresh_cache-cbd40984-29b6-4ed9-8c87-9fd4c80f6f13" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.422093] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "refresh_cache-cbd40984-29b6-4ed9-8c87-9fd4c80f6f13" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.422093] env[63538]: DEBUG nova.network.neutron [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1052.429088] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1052.429600] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e94be5eb-9876-44d6-a9e1-bd89278be189 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.442486] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] 
Waiting for the task: (returnval){ [ 1052.442486] env[63538]: value = "task-5101533" [ 1052.442486] env[63538]: _type = "Task" [ 1052.442486] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.457036] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101533, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.477261] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d929aab-4e16-4365-aaf3-c60de3a61d38 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.490038] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e045e1-7618-4543-b175-809cba5260f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.530822] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e8933f-cfaa-40f2-a93f-2d79e8e75461 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.540048] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4eec8a3-fbf5-42c1-85f4-977fa51d11bf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.557275] env[63538]: DEBUG nova.compute.provider_tree [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.658850] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101532, 'name': ReconfigVM_Task, 'duration_secs': 0.374706} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.658850] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af/5ef5fe70-fed9-4b3d-9d43-f01cf628d9af.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1052.659624] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2529753-c81b-4914-a851-dc1adcdfcf7c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.668233] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1052.668233] env[63538]: value = "task-5101534" [ 1052.668233] env[63538]: _type = "Task" [ 1052.668233] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.674305] env[63538]: DEBUG nova.network.neutron [req-9b5ca6a8-2e94-4bf9-b7a1-48ec737ba46f req-cdd86691-e785-4360-851f-244c64c7da6e service nova] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Updated VIF entry in instance network info cache for port da8361d9-226a-4b83-967f-41c85a0d4920. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1052.674747] env[63538]: DEBUG nova.network.neutron [req-9b5ca6a8-2e94-4bf9-b7a1-48ec737ba46f req-cdd86691-e785-4360-851f-244c64c7da6e service nova] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Updating instance_info_cache with network_info: [{"id": "da8361d9-226a-4b83-967f-41c85a0d4920", "address": "fa:16:3e:99:9a:a9", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda8361d9-22", "ovs_interfaceid": "da8361d9-226a-4b83-967f-41c85a0d4920", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.681035] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101534, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.736057] env[63538]: DEBUG oslo_concurrency.lockutils [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.741468] env[63538]: DEBUG oslo_concurrency.lockutils [req-9a4a5da6-5c14-4a31-825d-63070a12ba1d req-46f80612-5651-4483-b3c1-d089ff9b3552 service nova] Releasing lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.741830] env[63538]: DEBUG nova.compute.manager [req-9a4a5da6-5c14-4a31-825d-63070a12ba1d req-46f80612-5651-4483-b3c1-d089ff9b3552 service nova] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Received event network-vif-deleted-f05ac716-e765-4cba-958a-3ab686f9dbf9 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1052.799020] env[63538]: DEBUG nova.compute.manager [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1052.799478] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98e2030-f991-4f4b-a820-29ce65dc0114 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.955506] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101533, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.972801] env[63538]: DEBUG nova.network.neutron [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1053.061786] env[63538]: DEBUG nova.scheduler.client.report [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1053.185437] env[63538]: DEBUG oslo_concurrency.lockutils [req-9b5ca6a8-2e94-4bf9-b7a1-48ec737ba46f req-cdd86691-e785-4360-851f-244c64c7da6e service nova] Releasing lock "refresh_cache-5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.187122] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101534, 'name': Rename_Task, 'duration_secs': 0.164243} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.187122] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1053.187122] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-540d8006-8a50-4775-83ff-f1882d894ccf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.195644] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1053.195644] env[63538]: value = "task-5101535" [ 1053.195644] env[63538]: _type = "Task" [ 1053.195644] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.210845] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101535, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.222604] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "87f8bb3e-6f32-4850-ac54-efad0befb268" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.222882] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "87f8bb3e-6f32-4850-ac54-efad0befb268" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.223123] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "87f8bb3e-6f32-4850-ac54-efad0befb268-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.223329] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "87f8bb3e-6f32-4850-ac54-efad0befb268-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.223666] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "87f8bb3e-6f32-4850-ac54-efad0befb268-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.230494] env[63538]: INFO nova.compute.manager [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Terminating instance [ 1053.234610] env[63538]: DEBUG nova.compute.manager [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1053.234904] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1053.235972] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a02ed6b-92f2-428f-af25-18acddef2a3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.247242] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1053.247539] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b25bcce-7ef1-4c5a-b957-270567304fd7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.256166] env[63538]: DEBUG oslo_vmware.api [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 1053.256166] env[63538]: value = "task-5101536" [ 1053.256166] env[63538]: _type = "Task" [ 1053.256166] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.267581] env[63538]: DEBUG oslo_vmware.api [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101536, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.314040] env[63538]: INFO nova.compute.manager [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] instance snapshotting [ 1053.317941] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa741e5-2284-4a79-a99c-c2643b3f1255 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.340074] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9b7ec0-3cc7-4e62-91d0-a6e63c090e97 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.347249] env[63538]: DEBUG nova.network.neutron [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Updating instance_info_cache with network_info: [{"id": "c7228e85-42e9-42ec-b707-620c264a0f37", "address": "fa:16:3e:ff:02:09", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7228e85-42", "ovs_interfaceid": "c7228e85-42e9-42ec-b707-620c264a0f37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.367909] env[63538]: DEBUG oslo_concurrency.lockutils [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "interface-209c5f46-9c63-4f55-bc75-bc2e4da989ac-3d1c322d-13b3-4f3d-a880-d456b548938f" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.368254] env[63538]: DEBUG oslo_concurrency.lockutils [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-209c5f46-9c63-4f55-bc75-bc2e4da989ac-3d1c322d-13b3-4f3d-a880-d456b548938f" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.456999] env[63538]: DEBUG oslo_vmware.api [None 
req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101533, 'name': CreateSnapshot_Task, 'duration_secs': 0.55979} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.457310] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1053.458172] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070872c5-5aa9-41c2-949d-b7246fd340a2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.471464] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Volume attach. Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1053.471701] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992489', 'volume_id': 'd09b8ebf-5a95-4a50-ba17-6eaf05d0b995', 'name': 'volume-d09b8ebf-5a95-4a50-ba17-6eaf05d0b995', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '431a67e6-b90d-4930-9a86-7c49d1022ddc', 'attached_at': '', 'detached_at': '', 'volume_id': 'd09b8ebf-5a95-4a50-ba17-6eaf05d0b995', 'serial': 'd09b8ebf-5a95-4a50-ba17-6eaf05d0b995'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1053.472760] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63785fc-e45e-4033-afb5-8e65bac925ee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.490171] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d439abc-41d1-40a5-9c49-8510ccc08639 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.515857] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] volume-d09b8ebf-5a95-4a50-ba17-6eaf05d0b995/volume-d09b8ebf-5a95-4a50-ba17-6eaf05d0b995.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1053.517775] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b7608c0-e7f6-466b-8871-d68a16aabe89 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.531875] env[63538]: DEBUG nova.compute.manager [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Received event network-vif-plugged-c7228e85-42e9-42ec-b707-620c264a0f37 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1053.532114] env[63538]: DEBUG oslo_concurrency.lockutils [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] Acquiring lock "cbd40984-29b6-4ed9-8c87-9fd4c80f6f13-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.532342] env[63538]: DEBUG oslo_concurrency.lockutils [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] Lock "cbd40984-29b6-4ed9-8c87-9fd4c80f6f13-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.533199] env[63538]: DEBUG oslo_concurrency.lockutils [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] Lock "cbd40984-29b6-4ed9-8c87-9fd4c80f6f13-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.533199] env[63538]: DEBUG nova.compute.manager [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] No waiting events found dispatching network-vif-plugged-c7228e85-42e9-42ec-b707-620c264a0f37 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1053.533199] env[63538]: WARNING nova.compute.manager [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Received unexpected event network-vif-plugged-c7228e85-42e9-42ec-b707-620c264a0f37 for instance with vm_state building and task_state spawning. [ 1053.533199] env[63538]: DEBUG nova.compute.manager [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Received event network-changed-c7228e85-42e9-42ec-b707-620c264a0f37 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1053.533199] env[63538]: DEBUG nova.compute.manager [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Refreshing instance network info cache due to event network-changed-c7228e85-42e9-42ec-b707-620c264a0f37. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1053.533371] env[63538]: DEBUG oslo_concurrency.lockutils [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] Acquiring lock "refresh_cache-cbd40984-29b6-4ed9-8c87-9fd4c80f6f13" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.539600] env[63538]: DEBUG oslo_vmware.api [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1053.539600] env[63538]: value = "task-5101537" [ 1053.539600] env[63538]: _type = "Task" [ 1053.539600] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.551457] env[63538]: DEBUG oslo_vmware.api [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101537, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.567364] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.431s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.567724] env[63538]: INFO nova.compute.manager [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Migrating [ 1053.574808] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.163s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.575080] env[63538]: DEBUG nova.objects.instance [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Lazy-loading 'resources' on Instance uuid 148790a7-0a35-4d26-ae9f-6f954a161c88 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.708120] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101535, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.771040] env[63538]: DEBUG oslo_vmware.api [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101536, 'name': PowerOffVM_Task, 'duration_secs': 0.253439} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.771508] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1053.772043] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1053.772210] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-113b6606-e35c-44f5-96b0-b07d2a7e5673 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.847463] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1053.847873] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1053.848151] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Deleting the datastore file [datastore1] 87f8bb3e-6f32-4850-ac54-efad0befb268 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1053.848435] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2bec643-30ad-4096-b1f2-5f3267dbaafe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.852517] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "refresh_cache-cbd40984-29b6-4ed9-8c87-9fd4c80f6f13" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.852843] env[63538]: DEBUG nova.compute.manager [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Instance network_info: |[{"id": "c7228e85-42e9-42ec-b707-620c264a0f37", "address": "fa:16:3e:ff:02:09", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7228e85-42", "ovs_interfaceid": "c7228e85-42e9-42ec-b707-620c264a0f37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1053.853822] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1053.854331] env[63538]: DEBUG oslo_concurrency.lockutils [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] Acquired lock "refresh_cache-cbd40984-29b6-4ed9-8c87-9fd4c80f6f13" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.854413] env[63538]: DEBUG nova.network.neutron [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Refreshing network info cache for port c7228e85-42e9-42ec-b707-620c264a0f37 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1053.855687] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:02:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7228e85-42e9-42ec-b707-620c264a0f37', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1053.864494] env[63538]: DEBUG oslo.service.loopingcall [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1053.864797] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-446ef803-8ac9-4204-bcad-f2e1e8a8ecfa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.871891] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1053.872683] env[63538]: DEBUG oslo_vmware.api [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for the task: (returnval){ [ 1053.872683] env[63538]: value = "task-5101539" [ 1053.872683] env[63538]: _type = "Task" [ 1053.872683] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.873143] env[63538]: DEBUG oslo_concurrency.lockutils [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.873357] env[63538]: DEBUG oslo_concurrency.lockutils [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.873631] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94586dbf-14ff-48a7-9f89-80a7b9c0ba31 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.890669] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7778a67-2140-4074-8cfa-4df05a1b81cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.899695] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1053.899695] env[63538]: value = "task-5101540" [ 1053.899695] env[63538]: _type = "Task" [ 1053.899695] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.923690] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1053.923690] env[63538]: value = "task-5101541" [ 1053.923690] env[63538]: _type = "Task" [ 1053.923690] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.924781] env[63538]: DEBUG oslo_vmware.api [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101539, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.926326] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed9ec19-a937-4efb-9ef4-d622b8270b34 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.936575] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101540, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.943530] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101541, 'name': CreateVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.973024] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Reconfiguring VM to detach interface {{(pid=63538) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 1053.973024] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-752ebf5c-3ea2-4747-84e9-ce6a43a8c833 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.996502] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1053.996691] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-24ae05e0-1026-4d73-8030-810fcc8ceb4b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.006979] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1054.006979] env[63538]: value = "task-5101542" [ 1054.006979] env[63538]: _type = "Task" [ 1054.006979] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.008645] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1054.008645] env[63538]: value = "task-5101543" [ 1054.008645] env[63538]: _type = "Task" [ 1054.008645] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.024945] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.025173] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101542, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.053256] env[63538]: DEBUG oslo_vmware.api [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101537, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.085024] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.085271] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.085437] env[63538]: DEBUG nova.network.neutron [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1054.214243] env[63538]: DEBUG oslo_vmware.api [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101535, 'name': PowerOnVM_Task, 'duration_secs': 0.515144} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.214637] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1054.214912] env[63538]: INFO nova.compute.manager [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Took 8.20 seconds to spawn the instance on the hypervisor. 
[ 1054.215167] env[63538]: DEBUG nova.compute.manager [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1054.217486] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca696355-52bf-432f-bdbd-4e8d33c87c02 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.408825] env[63538]: DEBUG oslo_vmware.api [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Task: {'id': task-5101539, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205873} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.409685] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8d6e54-6535-42fe-b788-59664a2671ab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.413495] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1054.413839] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1054.414115] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1054.414384] env[63538]: INFO nova.compute.manager [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1054.414711] env[63538]: DEBUG oslo.service.loopingcall [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1054.415242] env[63538]: DEBUG nova.compute.manager [-] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1054.415517] env[63538]: DEBUG nova.network.neutron [-] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1054.422946] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101540, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.426305] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86286b8c-1ac5-4f3a-9b3e-4a13dfc9bc6d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.444102] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101541, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.473788] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae16eb10-ce7a-42a7-b8fb-041b1ea0415c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.478595] env[63538]: DEBUG nova.network.neutron [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Updated VIF entry in instance network info cache for port c7228e85-42e9-42ec-b707-620c264a0f37. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1054.479068] env[63538]: DEBUG nova.network.neutron [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Updating instance_info_cache with network_info: [{"id": "c7228e85-42e9-42ec-b707-620c264a0f37", "address": "fa:16:3e:ff:02:09", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7228e85-42", "ovs_interfaceid": "c7228e85-42e9-42ec-b707-620c264a0f37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.484331] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c619b9-e0dd-4dc8-83ee-0c7e25053313 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.503055] env[63538]: DEBUG nova.compute.provider_tree [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.523192] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101542, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.527814] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.552210] env[63538]: DEBUG oslo_vmware.api [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101537, 'name': ReconfigVM_Task, 'duration_secs': 0.598434} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.552210] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Reconfigured VM instance instance-00000058 to attach disk [datastore1] volume-d09b8ebf-5a95-4a50-ba17-6eaf05d0b995/volume-d09b8ebf-5a95-4a50-ba17-6eaf05d0b995.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1054.558397] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5d8ed7c-1115-46af-a22f-8926a5781476 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.581897] env[63538]: DEBUG oslo_vmware.api [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1054.581897] env[63538]: value = "task-5101544" [ 1054.581897] env[63538]: _type = "Task" [ 1054.581897] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.596196] env[63538]: DEBUG oslo_vmware.api [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101544, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.748542] env[63538]: INFO nova.compute.manager [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Took 18.97 seconds to build instance. [ 1054.911066] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101540, 'name': CreateSnapshot_Task, 'duration_secs': 0.778033} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.911377] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1054.912196] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf90ee0-9e48-4c6e-8744-ff1f2415e8e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.941594] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101541, 'name': CreateVM_Task, 'duration_secs': 0.588856} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.941776] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1054.942498] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.942790] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.943016] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1054.943308] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea582d57-69ec-4e2c-adca-c94fc0838b73 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.949430] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1054.949430] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52808d8b-7dfd-2318-42f0-7891bddbdfe6" [ 1054.949430] env[63538]: _type = "Task" [ 1054.949430] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.959102] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52808d8b-7dfd-2318-42f0-7891bddbdfe6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.983859] env[63538]: DEBUG oslo_concurrency.lockutils [req-26308545-1195-4c7c-99f4-6ea416183ff2 req-2052bee5-8324-4d20-bd01-f183cf3b8864 service nova] Releasing lock "refresh_cache-cbd40984-29b6-4ed9-8c87-9fd4c80f6f13" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.006179] env[63538]: DEBUG nova.scheduler.client.report [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1055.027982] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.029193] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101542, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.097366] env[63538]: DEBUG oslo_vmware.api [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101544, 'name': ReconfigVM_Task, 'duration_secs': 0.195424} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.097793] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992489', 'volume_id': 'd09b8ebf-5a95-4a50-ba17-6eaf05d0b995', 'name': 'volume-d09b8ebf-5a95-4a50-ba17-6eaf05d0b995', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '431a67e6-b90d-4930-9a86-7c49d1022ddc', 'attached_at': '', 'detached_at': '', 'volume_id': 'd09b8ebf-5a95-4a50-ba17-6eaf05d0b995', 'serial': 'd09b8ebf-5a95-4a50-ba17-6eaf05d0b995'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1055.119332] env[63538]: DEBUG nova.network.neutron [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating instance_info_cache with network_info: [{"id": "c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78", "address": "fa:16:3e:85:66:7a", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5eca2fb-a7", "ovs_interfaceid": "c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.250621] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e73e31dc-e028-4289-9f57-b22669f60196 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.493s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.256420] env[63538]: DEBUG nova.network.neutron [-] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.432153] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Creating linked-clone VM from snapshot 
{{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1055.432854] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d2c65206-3ee6-45f5-8c62-113ba833f0d8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.443260] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1055.443260] env[63538]: value = "task-5101545" [ 1055.443260] env[63538]: _type = "Task" [ 1055.443260] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.457702] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101545, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.469977] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52808d8b-7dfd-2318-42f0-7891bddbdfe6, 'name': SearchDatastore_Task, 'duration_secs': 0.022251} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.470440] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.471418] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1055.471418] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1055.471418] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.471418] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1055.472037] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d58ee8e-ec32-4cc7-94cd-7b044ba3fa81 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.493751] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1055.494078] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1055.495175] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62a1cc18-cdc7-4c56-84bb-46724d51a8d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.503251] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1055.503251] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526cd88e-8e9f-1318-1762-66383b54a9b7" [ 1055.503251] env[63538]: _type = "Task" [ 1055.503251] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.513703] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526cd88e-8e9f-1318-1762-66383b54a9b7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.518597] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.944s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.522809] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.150s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.523126] env[63538]: DEBUG nova.objects.instance [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lazy-loading 'resources' on Instance uuid 7752c64f-693f-4cf3-951c-7ee0657f1682 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.543053] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.543349] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101542, 'name': CloneVM_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.555513] env[63538]: INFO nova.scheduler.client.report [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Deleted allocations for instance 148790a7-0a35-4d26-ae9f-6f954a161c88 [ 1055.564571] env[63538]: DEBUG nova.compute.manager [req-75953214-268f-4095-9bdf-967a3104d9fb req-65f4a52f-e7b2-41e3-86b4-6292cc63f5b0 service nova] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Received event network-vif-deleted-6edf1839-d708-46ad-836d-e2ecac08730f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1055.621436] env[63538]: DEBUG oslo_concurrency.lockutils [None req-abf1e456-d013-4f1d-8755-ecc67c742e0c tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.622840] env[63538]: DEBUG oslo_concurrency.lockutils [None req-abf1e456-d013-4f1d-8755-ecc67c742e0c tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.622840] env[63538]: DEBUG nova.compute.manager [None req-abf1e456-d013-4f1d-8755-ecc67c742e0c tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1055.622840] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.625880] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4578bf93-ccf1-4fd3-840b-0c69057376ed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.637479] env[63538]: DEBUG nova.compute.manager [None req-abf1e456-d013-4f1d-8755-ecc67c742e0c tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63538) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1055.638341] env[63538]: DEBUG nova.objects.instance [None req-abf1e456-d013-4f1d-8755-ecc67c742e0c tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lazy-loading 'flavor' on Instance uuid 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.759478] env[63538]: INFO nova.compute.manager [-] [instance: 
87f8bb3e-6f32-4850-ac54-efad0befb268] Took 1.34 seconds to deallocate network for instance. [ 1055.956388] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101545, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.015626] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526cd88e-8e9f-1318-1762-66383b54a9b7, 'name': SearchDatastore_Task, 'duration_secs': 0.034712} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.020101] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa4a9f9a-f940-4c83-9470-05d094c21350 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.031417] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101542, 'name': CloneVM_Task, 'duration_secs': 1.583218} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.034380] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Created linked-clone VM from snapshot [ 1056.037773] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1056.037773] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cf579f-a198-63f2-515a-37413e9e2b8a" [ 1056.037773] env[63538]: _type = "Task" [ 1056.037773] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.043931] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ad79c8-d6d6-4218-bb8f-66a94c034286 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.047686] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.059849] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Uploading image 2e0382b2-f42e-4c8c-8f41-ce9c70949ae8 {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1056.071115] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cf579f-a198-63f2-515a-37413e9e2b8a, 'name': SearchDatastore_Task, 'duration_secs': 0.012748} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.074314] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.074314] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] cbd40984-29b6-4ed9-8c87-9fd4c80f6f13/cbd40984-29b6-4ed9-8c87-9fd4c80f6f13.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1056.074314] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d27f8b3-212f-4b79-a8c5-e851cd1bdd20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.075144] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f313dfdf-acb4-4dfa-9bdd-74043589130b tempest-ServersListShow296Test-1140598576 tempest-ServersListShow296Test-1140598576-project-member] Lock "148790a7-0a35-4d26-ae9f-6f954a161c88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.710s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.089247] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1056.089247] env[63538]: value = "task-5101546" [ 1056.089247] env[63538]: _type = "Task" [ 1056.089247] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.103565] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101546, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.128023] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1056.128023] env[63538]: value = "vm-992492" [ 1056.128023] env[63538]: _type = "VirtualMachine" [ 1056.128023] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1056.128023] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6cfee036-dc9e-4693-b694-3956ce67da28 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.148638] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-abf1e456-d013-4f1d-8755-ecc67c742e0c tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1056.149149] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lease: (returnval){ [ 1056.149149] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525d232b-8483-ddf3-a5f0-02b7dccd693b" [ 1056.149149] env[63538]: _type = "HttpNfcLease" [ 1056.149149] env[63538]: } obtained for exporting VM: (result){ [ 1056.149149] env[63538]: value = "vm-992492" [ 1056.149149] env[63538]: _type = "VirtualMachine" [ 1056.149149] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1056.149460] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the lease: (returnval){ [ 1056.149460] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525d232b-8483-ddf3-a5f0-02b7dccd693b" [ 1056.149460] env[63538]: _type = "HttpNfcLease" [ 1056.149460] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1056.150314] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e2cc744-e67d-40c7-80e5-052e73e48260 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.167738] env[63538]: DEBUG nova.objects.instance [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'flavor' on Instance uuid 431a67e6-b90d-4930-9a86-7c49d1022ddc {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1056.172794] env[63538]: DEBUG oslo_vmware.api [None req-abf1e456-d013-4f1d-8755-ecc67c742e0c tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1056.172794] env[63538]: value = "task-5101548" [ 1056.172794] env[63538]: _type = "Task" [ 1056.172794] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.172794] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1056.172794] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525d232b-8483-ddf3-a5f0-02b7dccd693b" [ 1056.172794] env[63538]: _type = "HttpNfcLease" [ 1056.172794] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1056.174038] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1056.174038] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525d232b-8483-ddf3-a5f0-02b7dccd693b" [ 1056.174038] env[63538]: _type = "HttpNfcLease" [ 1056.174038] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1056.178261] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e932c96-400f-46a2-95f5-d742d273f586 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.193390] env[63538]: DEBUG oslo_vmware.api [None req-abf1e456-d013-4f1d-8755-ecc67c742e0c tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101548, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.193931] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52633ee1-8a3a-718b-6e27-14b9986e06d7/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1056.193931] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52633ee1-8a3a-718b-6e27-14b9986e06d7/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1056.270371] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.364767] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a59bf95b-0429-4c5a-ab2b-197afb7d1c19 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.462856] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101545, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.467387] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087c5f45-ead4-451b-b80a-66d9b242b3e3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.479764] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1824d1-79ee-41eb-8291-d184e787d455 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.514961] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c5d12a-a4c8-4563-917e-9bb036b2e647 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.528600] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19a1678-bb99-4c10-b0c9-e9576a9c9fc6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.542426] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.556479] env[63538]: DEBUG nova.compute.provider_tree [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.600259] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101546, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.682047] env[63538]: DEBUG oslo_concurrency.lockutils [None req-240a10d1-a4e7-4a60-8a29-f467e39010ec tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.852s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.691744] env[63538]: DEBUG oslo_vmware.api [None req-abf1e456-d013-4f1d-8755-ecc67c742e0c tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101548, 'name': PowerOffVM_Task, 'duration_secs': 0.292787} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.692112] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-abf1e456-d013-4f1d-8755-ecc67c742e0c tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1056.692321] env[63538]: DEBUG nova.compute.manager [None req-abf1e456-d013-4f1d-8755-ecc67c742e0c tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1056.693259] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49d92c75-4353-410a-99f4-cc5e417a3fe8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.960903] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101545, 'name': CloneVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.048832] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.060085] env[63538]: DEBUG nova.scheduler.client.report [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1057.108610] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101546, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565776} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.108610] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] cbd40984-29b6-4ed9-8c87-9fd4c80f6f13/cbd40984-29b6-4ed9-8c87-9fd4c80f6f13.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1057.108610] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1057.108930] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-974acb70-09b6-4d17-88c1-af1444613367 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.119174] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1057.119174] env[63538]: value = "task-5101549" [ 1057.119174] env[63538]: _type = "Task" [ 1057.119174] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.130912] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101549, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.161379] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5d2849-9822-42b6-83e3-0fca83765029 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.187697] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating instance 'fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9' progress to 0 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1057.211731] env[63538]: DEBUG oslo_concurrency.lockutils [None req-abf1e456-d013-4f1d-8755-ecc67c742e0c tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.589s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.463708] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101545, 'name': CloneVM_Task, 'duration_secs': 1.556291} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.464019] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Created linked-clone VM from snapshot [ 1057.464853] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56862bf5-46b1-4cbd-91c0-21c11da47aa1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.476901] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Uploading image 5556071f-96ea-45af-8248-360d719b2d5e {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1057.489350] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1057.489689] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c899ad89-1314-4c2f-b241-6a7299ed744a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.498527] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1057.498527] env[63538]: value = "task-5101550" [ 1057.498527] env[63538]: _type = "Task" [ 1057.498527] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.512143] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101550, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.538981] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.578307] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.057s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.581784] env[63538]: DEBUG oslo_concurrency.lockutils [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.845s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.626947] env[63538]: INFO nova.scheduler.client.report [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted allocations for instance 7752c64f-693f-4cf3-951c-7ee0657f1682 [ 1057.638547] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101549, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074063} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.638818] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1057.639765] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c318cec0-0813-4f1f-ae12-4f7bedc95474 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.671339] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] cbd40984-29b6-4ed9-8c87-9fd4c80f6f13/cbd40984-29b6-4ed9-8c87-9fd4c80f6f13.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1057.671897] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b345061-0200-4c2e-b97a-8bb85defc1c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.694371] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1057.697041] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33ee0a4e-c037-4efc-841a-7be3fed961bc 
{{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.699641] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1057.699641] env[63538]: value = "task-5101551" [ 1057.699641] env[63538]: _type = "Task" [ 1057.699641] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.710405] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1057.710405] env[63538]: value = "task-5101552" [ 1057.710405] env[63538]: _type = "Task" [ 1057.710405] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.720779] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101551, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.728957] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101552, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.018501] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101550, 'name': Destroy_Task, 'duration_secs': 0.411574} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.024019] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Destroyed the VM [ 1058.024019] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1058.025455] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-96b0323b-8469-4e12-bc94-22d651627c40 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.043607] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.046915] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1058.046915] env[63538]: value = "task-5101553" [ 1058.046915] env[63538]: _type = "Task" [ 1058.046915] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.060546] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101553, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.087321] env[63538]: INFO nova.compute.claims [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1058.138827] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1b140765-132e-4d01-8077-3b6f06a9e9ae tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "7752c64f-693f-4cf3-951c-7ee0657f1682" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.745s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.212544] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101551, 'name': ReconfigVM_Task, 'duration_secs': 0.482421} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.217494] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Reconfigured VM instance instance-00000062 to attach disk [datastore1] cbd40984-29b6-4ed9-8c87-9fd4c80f6f13/cbd40984-29b6-4ed9-8c87-9fd4c80f6f13.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1058.218232] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6434284f-6cea-4229-a3eb-ddd057349a6f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.226791] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101552, 'name': PowerOffVM_Task, 'duration_secs': 0.247625} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.228763] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1058.229451] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating instance 'fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9' progress to 17 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1058.236760] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1058.236760] env[63538]: value = "task-5101554" [ 1058.236760] env[63538]: _type = "Task" [ 1058.236760] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.251037] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101554, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.437724] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.438232] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.509326] env[63538]: INFO nova.compute.manager [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Rebuilding instance [ 1058.542470] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.562516] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101553, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.572337] env[63538]: DEBUG nova.compute.manager [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1058.573278] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a7f08c-3f6a-4814-a67a-3ae0636c1959 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.598397] env[63538]: INFO nova.compute.resource_tracker [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating resource usage from migration 84c74eba-0557-41e9-a3f5-396efa857140 [ 1058.739905] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1058.744027] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1058.744027] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1058.744027] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1058.744027] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1058.744027] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1058.744027] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1058.744420] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1058.744611] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1058.745079] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1058.747100] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1058.755127] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3803ec29-fcd1-44f7-8c23-0bff709d8256 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.785204] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101554, 'name': Rename_Task, 'duration_secs': 0.224482} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.787134] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1058.787523] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1058.787523] env[63538]: value = "task-5101555" [ 1058.787523] env[63538]: _type = "Task" [ 1058.787523] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.787834] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac3a41e0-5b65-436e-ab34-3661c74d8dad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.804254] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101555, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.807633] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1058.807633] env[63538]: value = "task-5101556" [ 1058.807633] env[63538]: _type = "Task" [ 1058.807633] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.823274] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101556, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.942509] env[63538]: DEBUG nova.compute.utils [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1059.002218] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0d69b2-a754-4d60-a4f5-e458feaf7430 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.012865] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64706fce-6580-4505-92b0-b582424e5caf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.051947] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15e196a-1661-4652-9b49-4f68fa8ec136 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.065855] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.066722] env[63538]: DEBUG oslo_vmware.api [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101553, 'name': RemoveSnapshot_Task, 'duration_secs': 0.889093} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.069528] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1059.074030] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54336de-abd7-4140-9183-cd97d417a890 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.092279] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1059.092876] env[63538]: DEBUG nova.compute.provider_tree [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1059.094253] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c25e8588-0b6a-4c1e-964f-9fedc177a336 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.101540] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1059.101540] env[63538]: value = "task-5101557" [ 1059.101540] env[63538]: _type = "Task" [ 1059.101540] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.114186] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1059.114514] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1059.115395] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c683750c-cd4c-459b-8888-c7754846845b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.125034] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1059.125034] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-848325a8-7427-4989-8e1d-81cf9d83478a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.302431] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101555, 'name': ReconfigVM_Task, 'duration_secs': 0.259849} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.302799] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating instance 'fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9' progress to 33 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1059.313464] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1059.313721] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1059.314008] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleting the datastore file [datastore1] 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1059.314746] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34a02f3e-82e9-496e-aaeb-619b82e33781 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.322026] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101556, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.326526] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1059.326526] env[63538]: value = "task-5101559" [ 1059.326526] env[63538]: _type = "Task" [ 1059.326526] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.339051] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101559, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.446553] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.512693] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.512693] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.558820] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.581867] env[63538]: WARNING nova.compute.manager [None req-f66e4391-1890-4085-a6e6-cf1ac20dcd74 tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Image not found during snapshot: nova.exception.ImageNotFound: Image 5556071f-96ea-45af-8248-360d719b2d5e could not be found. 
[ 1059.599036] env[63538]: DEBUG nova.scheduler.client.report [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1059.809753] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1059.810086] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1059.810241] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1059.810441] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1059.810648] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1059.810981] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1059.811161] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1059.811350] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1059.811531] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1059.811703] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1059.811886] env[63538]: DEBUG nova.virt.hardware [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1059.818636] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Reconfiguring VM instance instance-0000005e to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1059.819043] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5346bfb1-21f4-4a2c-9373-21d4492456d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.847419] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101559, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.322829} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.851425] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.851776] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1059.851776] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1059.854377] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1059.854377] env[63538]: value = "task-5101560" [ 1059.854377] env[63538]: _type = "Task" [ 1059.854377] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.854752] env[63538]: DEBUG oslo_vmware.api [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101556, 'name': PowerOnVM_Task, 'duration_secs': 0.905491} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.855209] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1059.855802] env[63538]: INFO nova.compute.manager [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Took 8.55 seconds to spawn the instance on the hypervisor. 
[ 1059.855802] env[63538]: DEBUG nova.compute.manager [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1059.859546] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c918682-5b0b-481a-8373-92db1beb371d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.870330] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101560, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.016241] env[63538]: DEBUG nova.compute.manager [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1060.059366] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.105412] env[63538]: DEBUG oslo_concurrency.lockutils [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.524s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.105571] env[63538]: INFO nova.compute.manager [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Migrating [ 1060.114583] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.845s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.114880] env[63538]: DEBUG nova.objects.instance [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lazy-loading 'resources' on Instance uuid 87f8bb3e-6f32-4850-ac54-efad0befb268 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.258655] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Acquiring lock "6257bf5c-8a1c-4204-9605-cc07491e14ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.258959] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Lock "6257bf5c-8a1c-4204-9605-cc07491e14ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.369902] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101560, 'name': ReconfigVM_Task, 'duration_secs': 0.320811} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.370224] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Reconfigured VM instance instance-0000005e to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1060.371745] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c0d032-6c06-4bef-b037-d219565ffced {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.405277] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9/fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1060.407632] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0dcbeacf-6f22-42aa-bb39-15c51c6a0304 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.421197] env[63538]: INFO nova.compute.manager [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Took 18.20 seconds to build instance. [ 1060.430441] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1060.430441] env[63538]: value = "task-5101561" [ 1060.430441] env[63538]: _type = "Task" [ 1060.430441] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.440701] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101561, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.535295] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.535629] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.536025] env[63538]: INFO nova.compute.manager [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Attaching volume 0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd to /dev/sdc [ 1060.542399] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.560273] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.579140] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77fa1d95-42d3-47a3-842f-a5593fede078 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.587467] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20df939f-8e08-4e3a-86d5-f282c7a35ba3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.604173] env[63538]: DEBUG nova.virt.block_device [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Updating existing volume attachment record: e4646416-8747-41a2-9a28-1cc3f4ce3a6b {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1060.629575] env[63538]: DEBUG oslo_concurrency.lockutils [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1060.629866] env[63538]: DEBUG oslo_concurrency.lockutils [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.629961] env[63538]: DEBUG nova.network.neutron [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1060.764518] env[63538]: DEBUG nova.compute.manager [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1060.849439] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56313d2c-8f3a-445d-99c1-e0f76d0d3c6a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.852981] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "7ee64b60-9b88-4710-a477-e984fa36a142" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.853340] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "7ee64b60-9b88-4710-a477-e984fa36a142" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.853587] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "7ee64b60-9b88-4710-a477-e984fa36a142-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.853670] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "7ee64b60-9b88-4710-a477-e984fa36a142-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.853884] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "7ee64b60-9b88-4710-a477-e984fa36a142-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.856339] env[63538]: INFO nova.compute.manager [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Terminating instance [ 1060.860833] env[63538]: DEBUG nova.compute.manager [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1060.861055] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1060.862093] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8158a1be-0370-4bbf-a3ab-6bc56b06be46 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.865848] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201946d3-c3e9-4315-bb20-0f8eb9f0dbb7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.876554] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1060.902314] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8ca6b30-be67-4812-85ae-045bfdad4a32 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.907147] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab6edd7-b852-4fe3-aec4-44a5900817fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.917155] env[63538]: DEBUG nova.virt.hardware [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1060.917432] env[63538]: DEBUG nova.virt.hardware [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1060.917595] env[63538]: DEBUG nova.virt.hardware [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
1060.917787] env[63538]: DEBUG nova.virt.hardware [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1060.917941] env[63538]: DEBUG nova.virt.hardware [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1060.918167] env[63538]: DEBUG nova.virt.hardware [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1060.918491] env[63538]: DEBUG nova.virt.hardware [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1060.918680] env[63538]: DEBUG nova.virt.hardware [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1060.918912] env[63538]: DEBUG nova.virt.hardware [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1060.919104] env[63538]: DEBUG nova.virt.hardware [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1060.919295] env[63538]: DEBUG nova.virt.hardware [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1060.920674] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1427e9d-57e4-4104-87ae-c74cdead44be {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.926080] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b23d88b-fd25-4bb9-ae1e-199c41a6c61b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.930010] env[63538]: DEBUG oslo_concurrency.lockutils [None req-20b1c1fd-aca4-4d5e-a74e-2c1fb38b607c tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock 
"cbd40984-29b6-4ed9-8c87-9fd4c80f6f13" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.719s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.930448] env[63538]: DEBUG oslo_vmware.api [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1060.930448] env[63538]: value = "task-5101563" [ 1060.930448] env[63538]: _type = "Task" [ 1060.930448] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.947599] env[63538]: DEBUG nova.compute.provider_tree [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.950560] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785ed943-da3b-48ed-84e9-f8ecb53e973f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.958912] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101561, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.964280] env[63538]: DEBUG oslo_vmware.api [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101563, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.972915] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:9a:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da8361d9-226a-4b83-967f-41c85a0d4920', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1060.982083] env[63538]: DEBUG oslo.service.loopingcall [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1060.982862] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1060.983226] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7f58ecc-c742-4fe5-ae4f-5558e08e0ad9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.002599] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9691345-2aeb-46cb-8a7b-fa972b071873 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.008557] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1061.008557] env[63538]: value = "task-5101564" [ 1061.008557] env[63538]: _type = "Task" [ 1061.008557] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.010967] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a89963f5-3741-43d4-9bea-d1f9d1656080 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Suspending the VM {{(pid=63538) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 1061.014500] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9cf10554-44ef-4685-8c38-83ff5498b185 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.022603] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101564, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.024192] env[63538]: DEBUG oslo_vmware.api [None req-a89963f5-3741-43d4-9bea-d1f9d1656080 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1061.024192] env[63538]: value = "task-5101565" [ 1061.024192] env[63538]: _type = "Task" [ 1061.024192] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.034276] env[63538]: DEBUG oslo_vmware.api [None req-a89963f5-3741-43d4-9bea-d1f9d1656080 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101565, 'name': SuspendVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.065197] env[63538]: DEBUG oslo_vmware.api [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101543, 'name': ReconfigVM_Task, 'duration_secs': 6.828199} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.066300] env[63538]: DEBUG oslo_concurrency.lockutils [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1061.066594] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Reconfigured VM to detach interface {{(pid=63538) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 1061.298762] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.451383] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101561, 'name': ReconfigVM_Task, 'duration_secs': 0.688859} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.459875] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Reconfigured VM instance instance-0000005e to attach disk [datastore2] fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9/fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1061.460350] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating instance 'fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9' progress to 50 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1061.466459] env[63538]: DEBUG nova.scheduler.client.report [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1061.471373] env[63538]: DEBUG oslo_vmware.api [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: 
{'id': task-5101563, 'name': PowerOffVM_Task, 'duration_secs': 0.223372} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.474946] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1061.475275] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1061.477013] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e34791b5-5a9e-40e1-bf12-83ce7cdabb6e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.521689] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101564, 'name': CreateVM_Task, 'duration_secs': 0.481343} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.522513] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1061.523378] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.523589] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.524035] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1061.524894] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d538c75b-36f4-438f-af7e-cf06bbcb06e4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.538463] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1061.538463] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5241ffc3-8761-d8b8-9371-c9d3f95ad7db" [ 1061.538463] env[63538]: _type = "Task" [ 1061.538463] env[63538]: } 
to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.539632] env[63538]: DEBUG nova.network.neutron [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance_info_cache with network_info: [{"id": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "address": "fa:16:3e:08:aa:47", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa679ee9b-3e", "ovs_interfaceid": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.547242] env[63538]: DEBUG oslo_vmware.api [None req-a89963f5-3741-43d4-9bea-d1f9d1656080 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101565, 'name': SuspendVM_Task} progress is 58%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.553788] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1061.554061] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1061.554294] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleting the datastore file [datastore1] 7ee64b60-9b88-4710-a477-e984fa36a142 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1061.558284] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-edf4a865-4210-4e62-8583-97f5316ef931 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.561035] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5241ffc3-8761-d8b8-9371-c9d3f95ad7db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.567244] env[63538]: DEBUG oslo_vmware.api [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for the task: (returnval){ [ 1061.567244] env[63538]: value = "task-5101567" [ 1061.567244] env[63538]: _type = "Task" [ 1061.567244] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.576853] env[63538]: DEBUG oslo_vmware.api [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101567, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.599579] env[63538]: DEBUG nova.compute.manager [req-0e26d205-8549-46f5-bee7-14920ad00764 req-600b98a2-ce15-4cd9-bd5f-d5b1c00c4b1c service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Received event network-vif-deleted-3d1c322d-13b3-4f3d-a880-d456b548938f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1061.599778] env[63538]: INFO nova.compute.manager [req-0e26d205-8549-46f5-bee7-14920ad00764 req-600b98a2-ce15-4cd9-bd5f-d5b1c00c4b1c service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Neutron deleted interface 3d1c322d-13b3-4f3d-a880-d456b548938f; detaching it from the instance and deleting it from the info cache [ 1061.600085] env[63538]: DEBUG nova.network.neutron [req-0e26d205-8549-46f5-bee7-14920ad00764 req-600b98a2-ce15-4cd9-bd5f-d5b1c00c4b1c service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updating instance_info_cache with network_info: [{"id": "8240a40a-4486-4213-ac28-8eee15d652a8", "address": "fa:16:3e:41:82:69", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8240a40a-44", "ovs_interfaceid": "8240a40a-4486-4213-ac28-8eee15d652a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f", "address": "fa:16:3e:85:42:c6", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc53fd8cc-7c", "ovs_interfaceid": "c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1061.974105] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.860s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.978417] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.436s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.980503] env[63538]: INFO nova.compute.claims [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1061.983237] env[63538]: DEBUG oslo_concurrency.lockutils [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.983426] env[63538]: DEBUG oslo_concurrency.lockutils [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.983646] env[63538]: DEBUG nova.network.neutron [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1061.985499] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df2988e-6e39-4e43-9096-345aee1f3dee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.008377] env[63538]: INFO nova.scheduler.client.report [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Deleted allocations for instance 87f8bb3e-6f32-4850-ac54-efad0befb268 [ 1062.010182] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab8c531-6daf-452d-a3a4-5e762dc82f38 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.035549] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating instance 'fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9' progress to 67 {{(pid=63538) _update_instance_progress 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1062.052367] env[63538]: DEBUG oslo_concurrency.lockutils [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.053772] env[63538]: DEBUG oslo_vmware.api [None req-a89963f5-3741-43d4-9bea-d1f9d1656080 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101565, 'name': SuspendVM_Task, 'duration_secs': 0.814425} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.054384] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a89963f5-3741-43d4-9bea-d1f9d1656080 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Suspended the VM {{(pid=63538) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 1062.054580] env[63538]: DEBUG nova.compute.manager [None req-a89963f5-3741-43d4-9bea-d1f9d1656080 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1062.055790] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aba6909-1a81-4977-9eaa-d76fca207999 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.064914] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5241ffc3-8761-d8b8-9371-c9d3f95ad7db, 'name': SearchDatastore_Task, 'duration_secs': 0.022138} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.065716] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.066186] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1062.066259] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.066410] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.066600] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1062.066890] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-431be6a0-e303-4f0b-a18f-706215e9ccce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.083404] env[63538]: DEBUG oslo_vmware.api [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Task: {'id': task-5101567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.300809} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.083621] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1062.083621] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1062.083801] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1062.084077] env[63538]: INFO nova.compute.manager [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1062.084346] env[63538]: DEBUG oslo.service.loopingcall [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1062.085844] env[63538]: DEBUG nova.compute.manager [-] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1062.085983] env[63538]: DEBUG nova.network.neutron [-] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1062.087787] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1062.087988] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1062.088775] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd398e9f-42c3-42aa-be7d-b881b4a6b3ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.095745] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1062.095745] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d1bd19-4975-ee55-df49-530784602397" [ 1062.095745] env[63538]: _type = "Task" [ 1062.095745] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.106648] env[63538]: DEBUG oslo_concurrency.lockutils [req-0e26d205-8549-46f5-bee7-14920ad00764 req-600b98a2-ce15-4cd9-bd5f-d5b1c00c4b1c service nova] Acquiring lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.106833] env[63538]: DEBUG oslo_concurrency.lockutils [req-0e26d205-8549-46f5-bee7-14920ad00764 req-600b98a2-ce15-4cd9-bd5f-d5b1c00c4b1c service nova] Acquired lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.107482] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d1bd19-4975-ee55-df49-530784602397, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.108297] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf91337-99fe-43c4-b0a6-6cba2cb62835 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.127745] env[63538]: DEBUG oslo_concurrency.lockutils [req-0e26d205-8549-46f5-bee7-14920ad00764 req-600b98a2-ce15-4cd9-bd5f-d5b1c00c4b1c service nova] Releasing lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.128095] env[63538]: WARNING nova.compute.manager [req-0e26d205-8549-46f5-bee7-14920ad00764 req-600b98a2-ce15-4cd9-bd5f-d5b1c00c4b1c service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Detach interface failed, port_id=3d1c322d-13b3-4f3d-a880-d456b548938f, reason: No device with interface-id 3d1c322d-13b3-4f3d-a880-d456b548938f exists on VM: nova.exception.NotFound: No device with interface-id 3d1c322d-13b3-4f3d-a880-d456b548938f exists on VM [ 1062.422528] env[63538]: DEBUG nova.compute.manager [req-25b76db2-bde1-4faa-b7a4-973330473af1 req-40d6cd9d-25fc-48d1-8f97-149eef99da9a service nova] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Received event network-vif-deleted-3d218127-a216-4abc-8a2a-b2e004d627cc {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1062.422528] env[63538]: INFO nova.compute.manager [req-25b76db2-bde1-4faa-b7a4-973330473af1 req-40d6cd9d-25fc-48d1-8f97-149eef99da9a service nova] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Neutron deleted interface 3d218127-a216-4abc-8a2a-b2e004d627cc; detaching it from the instance and deleting it from the info cache [ 1062.422528] env[63538]: DEBUG nova.network.neutron [req-25b76db2-bde1-4faa-b7a4-973330473af1 req-40d6cd9d-25fc-48d1-8f97-149eef99da9a service nova] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.519752] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce6e2a6e-2af4-4730-b738-bc7c96fc67bf tempest-ServerRescueNegativeTestJSON-405097464 tempest-ServerRescueNegativeTestJSON-405097464-project-member] Lock "87f8bb3e-6f32-4850-ac54-efad0befb268" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.297s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.609450] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d1bd19-4975-ee55-df49-530784602397, 'name': SearchDatastore_Task, 'duration_secs': 0.0152} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.610390] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e9b0216-b4fe-4e1d-a7f4-76fb11509977 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.620232] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1062.620232] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525e2ca8-fef7-c2d3-6997-566f64b8f1d8" [ 1062.620232] env[63538]: _type = "Task" [ 1062.620232] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.631462] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525e2ca8-fef7-c2d3-6997-566f64b8f1d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.634601] env[63538]: DEBUG nova.network.neutron [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Port c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78 binding to destination host cpu-1 is already ACTIVE {{(pid=63538) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1062.894731] env[63538]: DEBUG nova.network.neutron [-] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.925984] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2102f05-db52-4a0e-a462-544d3e1982b7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.941012] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d10d1b4-53a9-49f5-a87e-33050c1222e2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.982439] env[63538]: DEBUG nova.compute.manager [req-25b76db2-bde1-4faa-b7a4-973330473af1 req-40d6cd9d-25fc-48d1-8f97-149eef99da9a service nova] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Detach interface failed, port_id=3d218127-a216-4abc-8a2a-b2e004d627cc, reason: Instance 7ee64b60-9b88-4710-a477-e984fa36a142 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1063.134313] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525e2ca8-fef7-c2d3-6997-566f64b8f1d8, 'name': SearchDatastore_Task, 'duration_secs': 0.014845} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.134685] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.134948] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af/5ef5fe70-fed9-4b3d-9d43-f01cf628d9af.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1063.135266] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c7195088-af99-4749-bd1a-0651c8fa018b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.149742] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1063.149742] env[63538]: value = "task-5101569" [ 1063.149742] env[63538]: _type = "Task" [ 1063.149742] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.164245] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101569, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.269884] env[63538]: DEBUG nova.network.neutron [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updating instance_info_cache with network_info: [{"id": "8240a40a-4486-4213-ac28-8eee15d652a8", "address": "fa:16:3e:41:82:69", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8240a40a-44", "ovs_interfaceid": "8240a40a-4486-4213-ac28-8eee15d652a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f", "address": "fa:16:3e:85:42:c6", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc53fd8cc-7c", "ovs_interfaceid": "c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.337196] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620e868c-90ec-465d-b6ee-d5a9dfb77c88 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.351887] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1841805-752f-459d-b666-07a5ef1ceed8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.384533] env[63538]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03ec5b8-3964-44a4-8c33-7a37978ebbfb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.392610] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a47e91-aab9-4cf5-a63a-811d9031ed85 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.398509] env[63538]: INFO nova.compute.manager [-] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Took 1.31 seconds to deallocate network for instance. [ 1063.408635] env[63538]: DEBUG nova.compute.provider_tree [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.434641] env[63538]: DEBUG oslo_concurrency.lockutils [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.435032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.435353] env[63538]: DEBUG oslo_concurrency.lockutils [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.435468] env[63538]: DEBUG oslo_concurrency.lockutils [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.435663] env[63538]: DEBUG oslo_concurrency.lockutils [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.438085] env[63538]: INFO nova.compute.manager [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] 
[instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Terminating instance [ 1063.441728] env[63538]: DEBUG nova.compute.manager [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1063.441935] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1063.442837] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2f41de-7202-46c1-9659-75480f95bd49 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.452220] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1063.452536] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c5d92ee-5640-4a5a-b6b0-2bc3d3628049 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.460187] env[63538]: DEBUG oslo_vmware.api [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1063.460187] env[63538]: value = "task-5101570" [ 1063.460187] env[63538]: _type = "Task" [ 1063.460187] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.469733] env[63538]: DEBUG oslo_vmware.api [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101570, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.501505] env[63538]: DEBUG oslo_concurrency.lockutils [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "cbd40984-29b6-4ed9-8c87-9fd4c80f6f13" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.501886] env[63538]: DEBUG oslo_concurrency.lockutils [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "cbd40984-29b6-4ed9-8c87-9fd4c80f6f13" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.502325] env[63538]: DEBUG oslo_concurrency.lockutils [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "cbd40984-29b6-4ed9-8c87-9fd4c80f6f13-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.502469] env[63538]: DEBUG oslo_concurrency.lockutils [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "cbd40984-29b6-4ed9-8c87-9fd4c80f6f13-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.503246] env[63538]: DEBUG oslo_concurrency.lockutils [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "cbd40984-29b6-4ed9-8c87-9fd4c80f6f13-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.505280] env[63538]: INFO nova.compute.manager [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Terminating instance [ 1063.507536] env[63538]: DEBUG nova.compute.manager [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1063.507993] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1063.508670] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970bfc9c-de3e-4698-8d4e-f5a6026933f2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.517694] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1063.518692] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c55793a6-14b6-459e-b178-64704c851684 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.581093] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea9fc40-7b6c-4f22-bd43-7909c098d601 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.604267] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance '466be7db-79e4-49fd-aa3b-56fbe5c60457' progress to 0 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1063.625382] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1063.625652] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1063.625881] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleting the datastore file [datastore1] cbd40984-29b6-4ed9-8c87-9fd4c80f6f13 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1063.626383] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f38d601d-8246-4113-9a03-8bed2458b8ef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.637883] env[63538]: DEBUG oslo_vmware.api [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 
tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1063.637883] env[63538]: value = "task-5101572" [ 1063.637883] env[63538]: _type = "Task" [ 1063.637883] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.656741] env[63538]: DEBUG oslo_vmware.api [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101572, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.670542] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.670904] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.671255] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.684737] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101569, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.773231] env[63538]: DEBUG oslo_concurrency.lockutils [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "refresh_cache-209c5f46-9c63-4f55-bc75-bc2e4da989ac" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.914233] env[63538]: DEBUG nova.scheduler.client.report [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1063.918267] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.975871] env[63538]: DEBUG oslo_vmware.api [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101570, 'name': PowerOffVM_Task, 'duration_secs': 0.237518} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.979784] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1063.979784] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1063.979784] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4df046d-3507-4c87-8f65-9aaa5c5d6052 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.091438] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1064.091986] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1064.092450] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Deleting the datastore file [datastore2] 209c5f46-9c63-4f55-bc75-bc2e4da989ac {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1064.092938] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-45a754b1-b9b1-48d8-bd74-821c147a6b11 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.101557] env[63538]: DEBUG oslo_vmware.api [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1064.101557] env[63538]: value = "task-5101574" [ 1064.101557] env[63538]: _type = "Task" [ 1064.101557] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.112356] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1064.112924] env[63538]: DEBUG oslo_vmware.api [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.112924] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d9599a4-0760-43ca-a5ed-3328d8596fa9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.119956] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1064.119956] env[63538]: value = "task-5101575" [ 1064.119956] env[63538]: _type = "Task" [ 1064.119956] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.130078] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101575, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.148569] env[63538]: DEBUG oslo_vmware.api [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101572, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.402316} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.149079] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1064.149126] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1064.149304] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1064.149568] env[63538]: INFO nova.compute.manager [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1064.149889] env[63538]: DEBUG oslo.service.loopingcall [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1064.150127] env[63538]: DEBUG nova.compute.manager [-] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1064.150230] env[63538]: DEBUG nova.network.neutron [-] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1064.168208] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101569, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.618724} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.168455] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af/5ef5fe70-fed9-4b3d-9d43-f01cf628d9af.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1064.168888] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1064.169103] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4487cb23-d546-4ad6-82ba-f4907a2e720b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.180179] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1064.180179] env[63538]: value = "task-5101576" [ 1064.180179] env[63538]: _type = "Task" [ 1064.180179] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.193094] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101576, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.277816] env[63538]: DEBUG oslo_concurrency.lockutils [None req-254c8831-1bc3-4d94-9f49-badaf5349690 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-209c5f46-9c63-4f55-bc75-bc2e4da989ac-3d1c322d-13b3-4f3d-a880-d456b548938f" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.909s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.421468] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.422255] env[63538]: DEBUG nova.compute.manager [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1064.428022] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.127s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.428375] env[63538]: INFO nova.compute.claims [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1064.465403] env[63538]: DEBUG nova.compute.manager [req-830f181d-ba11-446a-b5b9-82c8b279e5e7 req-e2eb426e-2759-43dc-a4bd-5b2ffc978587 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Received event network-vif-deleted-c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1064.465403] env[63538]: INFO nova.compute.manager [req-830f181d-ba11-446a-b5b9-82c8b279e5e7 req-e2eb426e-2759-43dc-a4bd-5b2ffc978587 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Neutron deleted interface c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f; detaching it from the instance and deleting it from the info cache [ 1064.465403] env[63538]: DEBUG nova.network.neutron [req-830f181d-ba11-446a-b5b9-82c8b279e5e7 req-e2eb426e-2759-43dc-a4bd-5b2ffc978587 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updating instance_info_cache with network_info: [{"id": "8240a40a-4486-4213-ac28-8eee15d652a8", "address": "fa:16:3e:41:82:69", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8240a40a-44", "ovs_interfaceid": "8240a40a-4486-4213-ac28-8eee15d652a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.612990] env[63538]: DEBUG oslo_vmware.api [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389652} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.613328] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1064.613533] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1064.613714] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1064.613900] env[63538]: INFO nova.compute.manager [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1064.614163] env[63538]: DEBUG oslo.service.loopingcall [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1064.614535] env[63538]: DEBUG nova.compute.manager [-] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1064.614655] env[63538]: DEBUG nova.network.neutron [-] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1064.629831] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101575, 'name': PowerOffVM_Task, 'duration_secs': 0.27659} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.630181] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1064.630540] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance '466be7db-79e4-49fd-aa3b-56fbe5c60457' progress to 17 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1064.672871] env[63538]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f could not be found.", "detail": ""}} {{(pid=63538) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1064.673144] env[63538]: DEBUG nova.network.neutron [-] Unable to show port c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f as it no longer exists. {{(pid=63538) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1064.694197] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101576, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078413} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.695409] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1064.696186] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3413056-b423-40a9-b0e2-761d51ea07d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.730401] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af/5ef5fe70-fed9-4b3d-9d43-f01cf628d9af.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1064.733123] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f662f22-54d9-4b17-a81c-5667c4b1ec39 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.752092] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" 
{{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.752299] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.752472] env[63538]: DEBUG nova.network.neutron [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1064.760712] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1064.760712] env[63538]: value = "task-5101577" [ 1064.760712] env[63538]: _type = "Task" [ 1064.760712] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.771570] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101577, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.934205] env[63538]: DEBUG nova.compute.utils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1064.938955] env[63538]: DEBUG nova.compute.manager [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1064.939153] env[63538]: DEBUG nova.network.neutron [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1064.969226] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-98d2b7d8-7cde-4a3a-8507-c357f14a3783 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.982861] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7ddd29-495e-4b2e-bafc-1e56859e8c3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.011373] env[63538]: DEBUG nova.policy [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '884364daa8f746fb9b32a8b33c9e2cfd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea05f3fb4676466bb2a286f5a2fefb8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1065.013270] env[63538]: DEBUG nova.network.neutron [-] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.817603] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1065.817931] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1065.817989] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1065.818145] env[63538]: DEBUG nova.virt.hardware [None 
req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1065.818300] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1065.818445] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1065.818645] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1065.818826] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1065.819022] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1065.819202] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1065.819378] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1065.827320] env[63538]: DEBUG nova.compute.manager [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1065.834171] env[63538]: DEBUG nova.compute.manager [req-830f181d-ba11-446a-b5b9-82c8b279e5e7 req-e2eb426e-2759-43dc-a4bd-5b2ffc978587 service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Detach interface failed, port_id=c53fd8cc-7c0a-4245-9d76-3fc1b4ecc21f, reason: Instance 209c5f46-9c63-4f55-bc75-bc2e4da989ac could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1065.835612] env[63538]: INFO nova.compute.manager [-] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Took 1.69 seconds to deallocate network for instance. [ 1065.837706] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Volume attach. Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1065.837706] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992495', 'volume_id': '0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd', 'name': 'volume-0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '431a67e6-b90d-4930-9a86-7c49d1022ddc', 'attached_at': '', 'detached_at': '', 'volume_id': '0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd', 'serial': '0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1065.837706] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5bde64ea-7ec9-4367-8384-109a70886de7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.854501] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3427321f-4050-4f48-b481-6fa0e5a671ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.861161] env[63538]: DEBUG nova.network.neutron [-] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.882211] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101577, 'name': ReconfigVM_Task, 'duration_secs': 0.490471} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.886439] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af/5ef5fe70-fed9-4b3d-9d43-f01cf628d9af.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1065.887677] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d6c8a1-ed8a-476e-a160-e3a7ef0e8415 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.890759] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1065.890759] env[63538]: value = "task-5101578" [ 1065.890759] env[63538]: _type = "Task" [ 1065.890759] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.891180] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4221a72d-04e9-4d03-b195-e54e1c5b96ad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.922689] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] volume-0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd/volume-0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1065.927967] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b60abd4-a62d-4dc5-9d0b-f07c1d24261d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.941623] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1065.941623] env[63538]: value = "task-5101579" [ 1065.941623] env[63538]: _type = "Task" [ 1065.941623] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.948419] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101578, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.955168] env[63538]: DEBUG oslo_vmware.api [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1065.955168] env[63538]: value = "task-5101580" [ 1065.955168] env[63538]: _type = "Task" [ 1065.955168] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.962686] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101579, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.972847] env[63538]: DEBUG oslo_vmware.api [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101580, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.210249] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1bf25c-1c40-4339-85a7-ea6dffd736b8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.221018] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319bfa5c-84b2-45d7-990e-d6666a001e0a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.253980] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac7a561-3d5f-4b8b-9e7a-ace52092ba7c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.263457] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790706b6-531b-4daf-85f0-6f0ac2b46873 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.279481] env[63538]: DEBUG nova.compute.provider_tree [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.363346] env[63538]: INFO nova.compute.manager [-] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Took 1.75 seconds to deallocate network for instance. 
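The "Acquiring lock ... / acquired ... / released ..." DEBUG records in this stretch of the log (for example the "compute_resources" and "refresh_cache-<uuid>" locks above and below) are emitted by oslo.concurrency's locking wrappers. The following is a minimal illustrative sketch, not Nova's actual code, of the two usage forms that appear to produce these messages; the function names and UUIDs are made up for the example, and Nova's real resource tracker adds further options (such as fair locking) on top of this.

    # Sketch only: demonstrates the oslo.concurrency lock patterns whose
    # DEBUG output ("Acquiring lock ...", "Lock ... acquired ...",
    # "Lock ... released ...") is visible in the surrounding log.
    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def fake_update_usage(instance_uuid):
        # While this body runs, any other caller decorated with the same
        # lock name blocks; the wrapper logs how long it waited for the
        # lock and how long the lock was held, which is what the
        # "waited N.NNNs" / "held N.NNNs" figures above report.
        print("updating usage for %s" % instance_uuid)

    def fake_refresh_cache(instance_uuid):
        # Context-manager form, comparable to the per-instance
        # "refresh_cache-<uuid>" locks taken while rebuilding the
        # network info cache.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            print("rebuilding network info cache for %s" % instance_uuid)

    if __name__ == "__main__":
        fake_update_usage("209c5f46-9c63-4f55-bc75-bc2e4da989ac")
        fake_refresh_cache("fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9")

By default these are in-process locks; the timings logged around them are what make the "held 2.443s" style entries in this section meaningful when diagnosing contention on the resource tracker.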
[ 1066.365875] env[63538]: DEBUG oslo_concurrency.lockutils [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.386333] env[63538]: DEBUG nova.network.neutron [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating instance_info_cache with network_info: [{"id": "c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78", "address": "fa:16:3e:85:66:7a", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5eca2fb-a7", "ovs_interfaceid": "c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.410540] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101578, 'name': ReconfigVM_Task, 'duration_secs': 0.314403} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.411255] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance '466be7db-79e4-49fd-aa3b-56fbe5c60457' progress to 33 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1066.459417] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101579, 'name': Rename_Task, 'duration_secs': 0.167061} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.463074] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1066.463626] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5c67d8a-2326-4b9a-9ea6-3eb9fa502776 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.471472] env[63538]: DEBUG oslo_vmware.api [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101580, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.472055] env[63538]: DEBUG nova.network.neutron [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Successfully created port: 07db4bb7-d084-4f5d-89b1-07062ff397f6 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1066.475376] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1066.475376] env[63538]: value = "task-5101581" [ 1066.475376] env[63538]: _type = "Task" [ 1066.475376] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.485659] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101581, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.521055] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52633ee1-8a3a-718b-6e27-14b9986e06d7/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1066.522136] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30338306-e4fd-40ee-9a9b-80aad1c90517 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.529231] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52633ee1-8a3a-718b-6e27-14b9986e06d7/disk-0.vmdk is in state: ready. 
{{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1066.529418] env[63538]: ERROR oslo_vmware.rw_handles [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52633ee1-8a3a-718b-6e27-14b9986e06d7/disk-0.vmdk due to incomplete transfer. [ 1066.529659] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-64363900-4ecd-47d2-bcd4-29e443507b37 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.538804] env[63538]: DEBUG oslo_vmware.rw_handles [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52633ee1-8a3a-718b-6e27-14b9986e06d7/disk-0.vmdk. {{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1066.539070] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Uploaded image 2e0382b2-f42e-4c8c-8f41-ce9c70949ae8 to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1066.541811] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1066.542183] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e3276bf5-ea65-4d0e-a1ad-be1712eee582 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.550610] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1066.550610] env[63538]: value = "task-5101582" [ 1066.550610] env[63538]: _type = "Task" [ 1066.550610] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.560975] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101582, 'name': Destroy_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.563919] env[63538]: DEBUG nova.compute.manager [req-95925e44-d35d-4e71-ba03-82107bb666fe req-65d91e07-6f1c-4791-ad8d-ae9e8328aa1f service nova] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Received event network-vif-deleted-c7228e85-42e9-42ec-b707-620c264a0f37 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1066.564095] env[63538]: DEBUG nova.compute.manager [req-95925e44-d35d-4e71-ba03-82107bb666fe req-65d91e07-6f1c-4791-ad8d-ae9e8328aa1f service nova] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Received event network-vif-deleted-8240a40a-4486-4213-ac28-8eee15d652a8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1066.782608] env[63538]: DEBUG nova.scheduler.client.report [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1066.842913] env[63538]: DEBUG nova.compute.manager [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1066.868718] env[63538]: DEBUG nova.virt.hardware [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1066.869171] env[63538]: DEBUG nova.virt.hardware [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1066.869419] env[63538]: DEBUG nova.virt.hardware [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1066.869708] env[63538]: DEBUG nova.virt.hardware [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1066.869968] env[63538]: DEBUG nova.virt.hardware [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1066.870235] env[63538]: DEBUG nova.virt.hardware [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1066.870633] env[63538]: DEBUG nova.virt.hardware [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1066.870922] env[63538]: DEBUG nova.virt.hardware [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1066.871155] env[63538]: DEBUG nova.virt.hardware [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 
tempest-ServersTestJSON-148199479-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1066.871423] env[63538]: DEBUG nova.virt.hardware [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1066.871698] env[63538]: DEBUG nova.virt.hardware [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1066.873320] env[63538]: DEBUG oslo_concurrency.lockutils [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.874410] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b04966-3f8a-4c37-b1ab-8f94565dc8da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.884507] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f3a6a3-1274-422d-bda8-06543dcf5b51 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.889250] env[63538]: DEBUG oslo_concurrency.lockutils [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1066.919844] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1066.920180] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1066.920372] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 
tempest-ServerActionsTestJSON-1096356934-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1066.920566] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1066.920718] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1066.920900] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1066.921145] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1066.921576] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1066.921576] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1066.921653] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1066.921884] env[63538]: DEBUG nova.virt.hardware [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1066.929662] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Reconfiguring VM instance instance-00000030 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1066.930107] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f04f7fa-711c-4b38-9971-534dc158fa7b {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.952212] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1066.952212] env[63538]: value = "task-5101583" [ 1066.952212] env[63538]: _type = "Task" [ 1066.952212] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.962948] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101583, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.972821] env[63538]: DEBUG oslo_vmware.api [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101580, 'name': ReconfigVM_Task, 'duration_secs': 0.750829} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.973019] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Reconfigured VM instance instance-00000058 to attach disk [datastore1] volume-0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd/volume-0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1066.979030] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1244c2fa-2c58-4042-97c8-36a68c1ef9e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.998133] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101581, 'name': PowerOnVM_Task, 'duration_secs': 0.516341} completed successfully. 
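The nova.virt.hardware records above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") show the driver enumerating every sockets/cores/threads split of the flavor's vCPU count that fits under the 65536-per-dimension limits, then sorting the candidates. The following self-contained sketch shows the shape of that enumeration; it is illustrative only, not Nova's implementation, and VirtCPUTopology here is a stand-in namedtuple.

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate every (sockets, cores, threads) split whose product is exactly
    # the vCPU count and that stays under the per-dimension limits.
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# For the m1.nano flavor in this log (vcpus=1) only 1:1:1 qualifies,
# which matches "Got 1 possible topologies".
print(possible_topologies(1))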
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.999557] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1066.999786] env[63538]: DEBUG nova.compute.manager [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1067.000147] env[63538]: DEBUG oslo_vmware.api [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1067.000147] env[63538]: value = "task-5101584" [ 1067.000147] env[63538]: _type = "Task" [ 1067.000147] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.000864] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825fa941-c1f1-49ce-b8a1-e3c917b6392c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.017489] env[63538]: DEBUG oslo_vmware.api [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101584, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.062114] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101582, 'name': Destroy_Task, 'duration_secs': 0.407199} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.062729] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Destroyed the VM [ 1067.062729] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1067.063058] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-91c2b9fa-5a61-45a8-accc-2d42d741bff5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.073648] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1067.073648] env[63538]: value = "task-5101585" [ 1067.073648] env[63538]: _type = "Task" [ 1067.073648] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.082777] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101585, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.291499] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.865s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.291964] env[63538]: DEBUG nova.compute.manager [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Start building networks asynchronously for instance. 
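"Start building networks asynchronously for instance" means the compute manager hands the Neutron port allocation to a background worker and keeps preparing block devices, only blocking on the ports when they are actually needed (the later "Allocating IP information in the background" and "Start building block device mappings" records are the two halves of that split). A minimal stand-in for the pattern, using concurrent.futures rather than Nova's own async helpers; allocate_for_instance and build_block_device_mappings are placeholders.

from concurrent.futures import ThreadPoolExecutor

def allocate_for_instance(instance_uuid):
    # Placeholder for the Neutron call that creates and binds the ports.
    return [{"port_id": "placeholder-port", "status": "DOWN"}]

def build_block_device_mappings(instance_uuid):
    # Placeholder for the block-device preparation done while ports are created.
    return [{"boot_index": 0, "source_type": "image"}]

def build_and_run(instance_uuid):
    with ThreadPoolExecutor(max_workers=1) as pool:
        network_future = pool.submit(allocate_for_instance, instance_uuid)
        bdms = build_block_device_mappings(instance_uuid)  # runs concurrently
        network_info = network_future.result()             # block only when needed
    return bdms, network_info

print(build_and_run("6257bf5c-8a1c-4204-9605-cc07491e14ea"))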
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1067.294901] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.377s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.295160] env[63538]: DEBUG nova.objects.instance [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lazy-loading 'resources' on Instance uuid 7ee64b60-9b88-4710-a477-e984fa36a142 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.416650] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2cbfc5-c0ac-44a7-bcca-0cefaf9329e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.438171] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf7844d-7cf1-43be-b41a-84727f5dd191 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.446717] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating instance 'fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9' progress to 83 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1067.469382] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101583, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.516091] env[63538]: DEBUG oslo_vmware.api [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101584, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.518698] env[63538]: INFO nova.compute.manager [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] bringing vm to original state: 'stopped' [ 1067.583623] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101585, 'name': RemoveSnapshot_Task} progress is 0%. 
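The 'acquired ... waited 3.377s' and '"released" ... held 2.865s' figures in the lockutils records come from timing how long a caller queued for the shared "compute_resources" lock and how long it held it; the resource tracker serialises instance claims and usage updates on that single name. A rough sketch of the same pattern, assuming oslo.concurrency's lockutils.lock() context manager; the timing and print below are our own illustration, not oslo's internals.

import time
from oslo_concurrency import lockutils  # assumes oslo.concurrency is installed

def update_usage(instance_uuid):
    queued = time.monotonic()
    with lockutils.lock("compute_resources"):
        waited = time.monotonic() - queued
        start = time.monotonic()
        # ... recompute this node's CPU/RAM/disk usage for instance_uuid ...
        held = time.monotonic() - start
    print(f'Lock "compute_resources": waited {waited:.3f}s, held {held:.3f}s')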
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.801683] env[63538]: DEBUG nova.compute.utils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1067.803437] env[63538]: DEBUG nova.compute.manager [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1067.803599] env[63538]: DEBUG nova.network.neutron [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1067.851525] env[63538]: DEBUG nova.policy [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0c09c9f5acc49e0bd4b6d7825f91c5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9294e4310e484e338932e3514d079594', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1067.956226] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1067.957038] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f59f698f-224e-419a-968c-5c6182c162ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.976577] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101583, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.976866] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1067.976866] env[63538]: value = "task-5101586" [ 1067.976866] env[63538]: _type = "Task" [ 1067.976866] env[63538]: } to complete. 
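Every ReconfigVM_Task / PowerOnVM_Task / RemoveSnapshot_Task record in this log is the same wait_for_task pattern: submit the vCenter task, then poll its state and progress until it reaches a terminal value. A generic polling loop in that shape; fetch_task_info() is a placeholder for the property-collector read, not the oslo.vmware API.

import time

class TaskFailed(Exception):
    pass

def wait_for_task(task_ref, fetch_task_info, interval=0.5):
    # Poll the task until it succeeds or errors, logging progress in between,
    # mirroring the "progress is N%" / "completed successfully" lines above.
    while True:
        info = fetch_task_info(task_ref)  # e.g. {'state': 'running', 'progress': 14}
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        print(f"Task {task_ref}: progress is {info.get('progress', 0)}%")
        time.sleep(interval)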
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.985203] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101586, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.015969] env[63538]: DEBUG oslo_vmware.api [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101584, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.032249] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be940f86-6efc-47f6-9e9e-0ee7898d7e14 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.041485] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27984e0a-5256-4032-9b5e-f50c3776f42b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.081978] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5459d0b9-a850-4446-8bd4-89d9a2a11f24 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.091785] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101585, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.096366] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1d235b-e5c7-4620-8ab3-d5f3a371c2b7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.113492] env[63538]: DEBUG nova.compute.provider_tree [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.279729] env[63538]: DEBUG nova.network.neutron [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Successfully created port: 8826b736-8295-4d09-8211-ccda578b133e {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1068.306799] env[63538]: DEBUG nova.compute.manager [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1068.465875] env[63538]: DEBUG nova.compute.manager [req-a74d7746-fc59-4bd3-9388-94c59323c796 req-3067ec6e-d65c-4a6f-a0bd-4644d20e6921 service nova] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Received event network-vif-plugged-07db4bb7-d084-4f5d-89b1-07062ff397f6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1068.467036] env[63538]: DEBUG oslo_concurrency.lockutils [req-a74d7746-fc59-4bd3-9388-94c59323c796 req-3067ec6e-d65c-4a6f-a0bd-4644d20e6921 service nova] Acquiring lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.467036] env[63538]: DEBUG oslo_concurrency.lockutils [req-a74d7746-fc59-4bd3-9388-94c59323c796 req-3067ec6e-d65c-4a6f-a0bd-4644d20e6921 service nova] Lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.467036] env[63538]: DEBUG oslo_concurrency.lockutils [req-a74d7746-fc59-4bd3-9388-94c59323c796 req-3067ec6e-d65c-4a6f-a0bd-4644d20e6921 service nova] Lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.467410] env[63538]: DEBUG nova.compute.manager [req-a74d7746-fc59-4bd3-9388-94c59323c796 req-3067ec6e-d65c-4a6f-a0bd-4644d20e6921 service nova] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] No waiting events found dispatching network-vif-plugged-07db4bb7-d084-4f5d-89b1-07062ff397f6 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1068.467588] env[63538]: WARNING nova.compute.manager [req-a74d7746-fc59-4bd3-9388-94c59323c796 req-3067ec6e-d65c-4a6f-a0bd-4644d20e6921 service nova] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Received unexpected event network-vif-plugged-07db4bb7-d084-4f5d-89b1-07062ff397f6 for instance with vm_state building and task_state spawning. [ 1068.476786] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101583, 'name': ReconfigVM_Task, 'duration_secs': 1.155867} completed successfully. 
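The network-vif-plugged records above show Neutron notifying Nova through the external-event API: the compute manager keeps a per-instance table of events somebody is waiting on, and an event that arrives with no registered waiter produces exactly the "Received unexpected event ... vm_state building and task_state spawning" warning. A minimal dispatcher with that behaviour, using threading.Event as a simplification of the real InstanceEvents bookkeeping.

import threading
from collections import defaultdict

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)  # instance_uuid -> {event_name: Event}

    def prepare(self, instance_uuid, event_name):
        # The spawning side registers the events it expects, then waits on them.
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        # The Neutron-driven handler pops and signals the matching waiter.
        with self._lock:
            ev = self._waiters[instance_uuid].pop(event_name, None)
        if ev is None:
            print(f"WARNING: received unexpected event {event_name} "
                  f"for instance {instance_uuid}")
        else:
            ev.set()

events = InstanceEvents()
events.dispatch("0c19d662-4ae0-4ec9-93b4-9bd45822ed92",
                "network-vif-plugged-07db4bb7-d084-4f5d-89b1-07062ff397f6")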
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.477206] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Reconfigured VM instance instance-00000030 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1068.478481] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7db494-7b03-4f76-9745-050ae6338199 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.497373] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101586, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.531189] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 466be7db-79e4-49fd-aa3b-56fbe5c60457/466be7db-79e4-49fd-aa3b-56fbe5c60457.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1068.536682] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.537107] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.537433] env[63538]: DEBUG nova.compute.manager [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1068.537820] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85717a9f-da98-4bb6-81fe-a507ad863fb8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.562582] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0ee1a3-13d5-4af4-8cdd-4f0f9e6af89e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.573306] env[63538]: DEBUG nova.compute.manager [None 
req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63538) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1068.581296] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1068.581296] env[63538]: value = "task-5101587" [ 1068.581296] env[63538]: _type = "Task" [ 1068.581296] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.581783] env[63538]: DEBUG oslo_vmware.api [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101584, 'name': ReconfigVM_Task, 'duration_secs': 1.123567} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.582509] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1068.589022] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992495', 'volume_id': '0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd', 'name': 'volume-0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '431a67e6-b90d-4930-9a86-7c49d1022ddc', 'attached_at': '', 'detached_at': '', 'volume_id': '0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd', 'serial': '0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1068.589571] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-793cc068-3540-488e-affd-6504ee316773 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.606518] env[63538]: DEBUG oslo_vmware.api [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101585, 'name': RemoveSnapshot_Task, 'duration_secs': 1.390468} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.611390] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1068.611695] env[63538]: INFO nova.compute.manager [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Took 16.73 seconds to snapshot the instance on the hypervisor. [ 1068.618030] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101587, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.618030] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1068.618030] env[63538]: value = "task-5101588" [ 1068.618030] env[63538]: _type = "Task" [ 1068.618030] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.618030] env[63538]: DEBUG nova.scheduler.client.report [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1068.632326] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101588, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.988347] env[63538]: DEBUG oslo_vmware.api [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101586, 'name': PowerOnVM_Task, 'duration_secs': 0.655728} completed successfully. 
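"Inventory has not changed for provider ... based on inventory data" is the report client comparing the inventory it is about to send (totals, reservations, allocation ratios per resource class) with what it last reported for the provider and skipping the Placement update when they match. A trimmed-down version of that check; put_inventory() is a placeholder, and the real client also tracks provider generations. The sample values are taken from the record above.

def maybe_update_inventory(provider_uuid, new_inventory, reported_cache, put_inventory):
    # Only push inventory to Placement when it actually differs from the cache.
    if reported_cache.get(provider_uuid) == new_inventory:
        print(f"Inventory has not changed for provider {provider_uuid}")
        return False
    put_inventory(provider_uuid, new_inventory)
    reported_cache[provider_uuid] = new_inventory
    return True

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 95},
}
cache = {"f65218a4-1d3d-476a-9093-01cae92c8635": inventory}
maybe_update_inventory("f65218a4-1d3d-476a-9093-01cae92c8635", inventory, cache,
                       put_inventory=lambda provider, inv: None)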
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.988722] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1068.988835] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-cc525c68-6561-4248-9fc7-45edbdfa63d4 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating instance 'fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9' progress to 100 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1069.094133] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101587, 'name': ReconfigVM_Task, 'duration_secs': 0.298971} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.094319] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 466be7db-79e4-49fd-aa3b-56fbe5c60457/466be7db-79e4-49fd-aa3b-56fbe5c60457.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1069.094449] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance '466be7db-79e4-49fd-aa3b-56fbe5c60457' progress to 50 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1069.108746] env[63538]: DEBUG nova.network.neutron [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Successfully updated port: 07db4bb7-d084-4f5d-89b1-07062ff397f6 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1069.126452] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.831s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.130188] env[63538]: DEBUG oslo_concurrency.lockutils [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.765s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.130739] env[63538]: DEBUG nova.objects.instance [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 
tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lazy-loading 'resources' on Instance uuid cbd40984-29b6-4ed9-8c87-9fd4c80f6f13 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.137924] env[63538]: DEBUG oslo_vmware.api [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101588, 'name': PowerOffVM_Task, 'duration_secs': 0.210731} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.140507] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1069.140704] env[63538]: DEBUG nova.compute.manager [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1069.143136] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e103d5bf-b372-4bcd-93a5-409871dbae4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.147922] env[63538]: DEBUG nova.compute.manager [req-b6cb26da-4d25-4835-9846-6bff7ec75393 req-7f2ade9a-6913-49bb-b7d6-2b0d22127ee4 service nova] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Received event network-changed-07db4bb7-d084-4f5d-89b1-07062ff397f6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1069.148148] env[63538]: DEBUG nova.compute.manager [req-b6cb26da-4d25-4835-9846-6bff7ec75393 req-7f2ade9a-6913-49bb-b7d6-2b0d22127ee4 service nova] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Refreshing instance network info cache due to event network-changed-07db4bb7-d084-4f5d-89b1-07062ff397f6. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1069.148393] env[63538]: DEBUG oslo_concurrency.lockutils [req-b6cb26da-4d25-4835-9846-6bff7ec75393 req-7f2ade9a-6913-49bb-b7d6-2b0d22127ee4 service nova] Acquiring lock "refresh_cache-0c19d662-4ae0-4ec9-93b4-9bd45822ed92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.148592] env[63538]: DEBUG oslo_concurrency.lockutils [req-b6cb26da-4d25-4835-9846-6bff7ec75393 req-7f2ade9a-6913-49bb-b7d6-2b0d22127ee4 service nova] Acquired lock "refresh_cache-0c19d662-4ae0-4ec9-93b4-9bd45822ed92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.148778] env[63538]: DEBUG nova.network.neutron [req-b6cb26da-4d25-4835-9846-6bff7ec75393 req-7f2ade9a-6913-49bb-b7d6-2b0d22127ee4 service nova] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Refreshing network info cache for port 07db4bb7-d084-4f5d-89b1-07062ff397f6 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1069.151872] env[63538]: INFO nova.scheduler.client.report [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Deleted allocations for instance 7ee64b60-9b88-4710-a477-e984fa36a142 [ 1069.185929] env[63538]: DEBUG nova.compute.manager [None req-a6c9b469-53b6-4b43-b866-5e72c9db0a4b tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Found 1 images (rotation: 2) {{(pid=63538) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1069.320366] env[63538]: DEBUG nova.compute.manager [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Start spawning the instance on the hypervisor. 
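"Found 1 images (rotation: 2)" comes from backup rotation: after a backup snapshot is taken, only the newest <rotation> backup images for the instance are kept and the rest are deleted, so one existing image against a rotation of 2 means nothing is purged here. A sketch of that logic under those assumptions; the image records and delete_image() stand in for the Glance client.

def rotate_backups(images, rotation, delete_image):
    # Keep the newest <rotation> backup images, delete everything older.
    backups = sorted(images, key=lambda img: img["created_at"], reverse=True)
    print(f"Found {len(backups)} images (rotation: {rotation})")
    for stale in backups[rotation:]:
        delete_image(stale["id"])

rotate_backups(
    images=[{"id": "img-1", "created_at": "2025-12-12T13:00:00Z"}],
    rotation=2,
    delete_image=lambda image_id: print(f"deleting {image_id}"),
)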
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1069.349874] env[63538]: DEBUG nova.virt.hardware [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1069.350154] env[63538]: DEBUG nova.virt.hardware [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1069.350378] env[63538]: DEBUG nova.virt.hardware [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1069.350585] env[63538]: DEBUG nova.virt.hardware [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1069.350776] env[63538]: DEBUG nova.virt.hardware [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1069.351212] env[63538]: DEBUG nova.virt.hardware [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1069.351212] env[63538]: DEBUG nova.virt.hardware [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1069.351344] env[63538]: DEBUG nova.virt.hardware [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1069.351534] env[63538]: DEBUG nova.virt.hardware [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 
tempest-ServersTestJSON-1673399815-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1069.351719] env[63538]: DEBUG nova.virt.hardware [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1069.351899] env[63538]: DEBUG nova.virt.hardware [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1069.352780] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4250c6-eb8e-40a2-b902-bd59ab876854 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.361528] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d6bd27-597c-4393-a484-961702e2a38a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.603415] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e95de54-30f0-417b-9621-7e3377a03e27 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.624549] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "refresh_cache-0c19d662-4ae0-4ec9-93b4-9bd45822ed92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.625982] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e604f5d-ce55-477f-91f1-2a055283ba14 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.648464] env[63538]: DEBUG nova.objects.instance [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'flavor' on Instance uuid 431a67e6-b90d-4930-9a86-7c49d1022ddc {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.651031] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance '466be7db-79e4-49fd-aa3b-56fbe5c60457' progress to 67 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1069.664388] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3ff7c30f-087d-488b-8cc4-fae62dec97ec tempest-ImagesTestJSON-1517434018 tempest-ImagesTestJSON-1517434018-project-member] Lock "7ee64b60-9b88-4710-a477-e984fa36a142" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.811s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.668712] env[63538]: 
DEBUG oslo_concurrency.lockutils [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.132s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.696366] env[63538]: DEBUG nova.network.neutron [req-b6cb26da-4d25-4835-9846-6bff7ec75393 req-7f2ade9a-6913-49bb-b7d6-2b0d22127ee4 service nova] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1069.891508] env[63538]: DEBUG nova.network.neutron [req-b6cb26da-4d25-4835-9846-6bff7ec75393 req-7f2ade9a-6913-49bb-b7d6-2b0d22127ee4 service nova] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.923577] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1dbf00-91ff-40ad-bace-578cc9567483 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.937189] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136425c1-f148-47cd-8cb0-efe39770e893 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.972843] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff17c25-5519-4ab8-a085-ff4edd45ed60 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.981470] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7e6514-7cd1-467b-87a6-5034a256ce81 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.007629] env[63538]: DEBUG nova.compute.provider_tree [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.152978] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35667ae6-191a-4d40-a40f-303a4257ff5c tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.617s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.182479] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.239427] env[63538]: DEBUG nova.network.neutron [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a 
tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Port a679ee9b-3e51-4ce7-ab24-0792218d36ba binding to destination host cpu-1 is already ACTIVE {{(pid=63538) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1070.273397] env[63538]: DEBUG nova.compute.manager [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1070.275965] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370d748f-674f-43e6-9bf4-884d47d3d5e9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.300735] env[63538]: DEBUG nova.network.neutron [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Successfully updated port: 8826b736-8295-4d09-8211-ccda578b133e {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1070.395340] env[63538]: DEBUG oslo_concurrency.lockutils [req-b6cb26da-4d25-4835-9846-6bff7ec75393 req-7f2ade9a-6913-49bb-b7d6-2b0d22127ee4 service nova] Releasing lock "refresh_cache-0c19d662-4ae0-4ec9-93b4-9bd45822ed92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.395340] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "refresh_cache-0c19d662-4ae0-4ec9-93b4-9bd45822ed92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.395340] env[63538]: DEBUG nova.network.neutron [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1070.526443] env[63538]: DEBUG nova.scheduler.client.report [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1070.785484] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1070.785484] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.793203] env[63538]: INFO nova.compute.manager [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] instance snapshotting [ 1070.793203] env[63538]: DEBUG nova.objects.instance [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'flavor' on Instance uuid 3d80dc17-e330-4575-8e12-e06d8e76274a {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.803742] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Acquiring lock "refresh_cache-6257bf5c-8a1c-4204-9605-cc07491e14ea" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1070.805690] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Acquired lock "refresh_cache-6257bf5c-8a1c-4204-9605-cc07491e14ea" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.805690] env[63538]: DEBUG nova.network.neutron [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1070.942141] env[63538]: DEBUG nova.network.neutron [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1070.982235] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.982498] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.982746] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.982922] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.983106] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.985390] env[63538]: INFO nova.compute.manager [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Terminating instance [ 1070.987383] env[63538]: DEBUG nova.compute.manager [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1070.987581] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1070.988598] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a2682f-16a3-474d-95ce-72d972610b69 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.997498] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1070.997803] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24a9ada1-1117-46f3-86d5-ba4b9d1f7e89 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.034816] env[63538]: DEBUG oslo_concurrency.lockutils [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.905s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.040440] env[63538]: DEBUG oslo_concurrency.lockutils [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.166s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.040440] env[63538]: DEBUG nova.objects.instance [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'resources' on Instance uuid 209c5f46-9c63-4f55-bc75-bc2e4da989ac {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1071.076761] env[63538]: INFO nova.scheduler.client.report [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleted allocations for instance cbd40984-29b6-4ed9-8c87-9fd4c80f6f13 [ 1071.079382] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1071.079646] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1071.079838] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleting the datastore file [datastore1] 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1071.089020] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aade06fa-6121-423c-b3af-6ac552249be5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.094490] env[63538]: DEBUG oslo_vmware.api [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1071.094490] env[63538]: value = "task-5101590" [ 1071.094490] env[63538]: _type = "Task" [ 1071.094490] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.111512] env[63538]: DEBUG oslo_vmware.api [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101590, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.221913] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.222249] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.222523] env[63538]: DEBUG nova.compute.manager [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Going to confirm migration 4 {{(pid=63538) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1071.258012] env[63538]: DEBUG nova.compute.manager [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Received event network-vif-plugged-8826b736-8295-4d09-8211-ccda578b133e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1071.258253] env[63538]: DEBUG oslo_concurrency.lockutils [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] Acquiring lock "6257bf5c-8a1c-4204-9605-cc07491e14ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.258471] env[63538]: DEBUG oslo_concurrency.lockutils [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] Lock "6257bf5c-8a1c-4204-9605-cc07491e14ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.258652] env[63538]: DEBUG oslo_concurrency.lockutils [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] Lock "6257bf5c-8a1c-4204-9605-cc07491e14ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.258895] env[63538]: DEBUG nova.compute.manager [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] No waiting events found dispatching network-vif-plugged-8826b736-8295-4d09-8211-ccda578b133e {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1071.259110] env[63538]: WARNING nova.compute.manager [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Received unexpected event network-vif-plugged-8826b736-8295-4d09-8211-ccda578b133e for instance with vm_state building and task_state spawning. [ 1071.259290] env[63538]: DEBUG nova.compute.manager [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Received event network-changed-8826b736-8295-4d09-8211-ccda578b133e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1071.259454] env[63538]: DEBUG nova.compute.manager [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Refreshing instance network info cache due to event network-changed-8826b736-8295-4d09-8211-ccda578b133e. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1071.259630] env[63538]: DEBUG oslo_concurrency.lockutils [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] Acquiring lock "refresh_cache-6257bf5c-8a1c-4204-9605-cc07491e14ea" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.262122] env[63538]: DEBUG nova.network.neutron [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Updating instance_info_cache with network_info: [{"id": "07db4bb7-d084-4f5d-89b1-07062ff397f6", "address": "fa:16:3e:dc:69:6f", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": "nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07db4bb7-d0", "ovs_interfaceid": "07db4bb7-d084-4f5d-89b1-07062ff397f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.268857] env[63538]: DEBUG oslo_concurrency.lockutils [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "466be7db-79e4-49fd-aa3b-56fbe5c60457-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.269097] env[63538]: DEBUG oslo_concurrency.lockutils [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.269283] env[63538]: DEBUG oslo_concurrency.lockutils [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.287849] env[63538]: INFO nova.compute.manager [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 
tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Detaching volume d09b8ebf-5a95-4a50-ba17-6eaf05d0b995 [ 1071.299517] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35dce0ab-8801-4a62-b38b-7896dab79b65 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.323969] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893e6231-1ad5-4760-b8e7-3499d3887847 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.333879] env[63538]: INFO nova.virt.block_device [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Attempting to driver detach volume d09b8ebf-5a95-4a50-ba17-6eaf05d0b995 from mountpoint /dev/sdb [ 1071.334197] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Volume detach. Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1071.334410] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992489', 'volume_id': 'd09b8ebf-5a95-4a50-ba17-6eaf05d0b995', 'name': 'volume-d09b8ebf-5a95-4a50-ba17-6eaf05d0b995', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '431a67e6-b90d-4930-9a86-7c49d1022ddc', 'attached_at': '', 'detached_at': '', 'volume_id': 'd09b8ebf-5a95-4a50-ba17-6eaf05d0b995', 'serial': 'd09b8ebf-5a95-4a50-ba17-6eaf05d0b995'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1071.335273] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c64312-2e03-4f38-a3e7-17e5653d3b4c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.363937] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92c6abe-a278-4af9-a999-0ad205b9c07f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.372149] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337682fa-2c5c-4b19-bfa2-71660fd50e6c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.396459] env[63538]: DEBUG nova.network.neutron [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1071.399244] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d530a98-d891-4f3e-b8d9-ac4cb0f99bb0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.415415] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] The volume has not been displaced from its original location: [datastore1] volume-d09b8ebf-5a95-4a50-ba17-6eaf05d0b995/volume-d09b8ebf-5a95-4a50-ba17-6eaf05d0b995.vmdk. No consolidation needed. {{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1071.420871] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Reconfiguring VM instance instance-00000058 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1071.423026] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7af4ccf7-9e95-4a64-b733-f396f087f293 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.443326] env[63538]: DEBUG oslo_vmware.api [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1071.443326] env[63538]: value = "task-5101591" [ 1071.443326] env[63538]: _type = "Task" [ 1071.443326] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.453196] env[63538]: DEBUG oslo_vmware.api [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101591, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.592424] env[63538]: DEBUG oslo_concurrency.lockutils [None req-89c2340d-45a9-43a2-9d4c-03330c9656b3 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "cbd40984-29b6-4ed9-8c87-9fd4c80f6f13" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.091s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.605033] env[63538]: DEBUG oslo_vmware.api [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.414836} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.609852] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1071.610763] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1071.611123] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1071.611414] env[63538]: INFO nova.compute.manager [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1071.611787] env[63538]: DEBUG oslo.service.loopingcall [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1071.612484] env[63538]: DEBUG nova.compute.manager [-] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1071.612484] env[63538]: DEBUG nova.network.neutron [-] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1071.676901] env[63538]: DEBUG nova.network.neutron [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Updating instance_info_cache with network_info: [{"id": "8826b736-8295-4d09-8211-ccda578b133e", "address": "fa:16:3e:92:cf:b0", "network": {"id": "5a392b91-3e04-4ce1-bf51-bdad0c03becd", "bridge": "br-int", "label": "tempest-ServersTestJSON-180232194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9294e4310e484e338932e3514d079594", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8826b736-82", "ovs_interfaceid": "8826b736-8295-4d09-8211-ccda578b133e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.768947] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "refresh_cache-0c19d662-4ae0-4ec9-93b4-9bd45822ed92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.769306] env[63538]: DEBUG nova.compute.manager [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Instance network_info: |[{"id": "07db4bb7-d084-4f5d-89b1-07062ff397f6", "address": "fa:16:3e:dc:69:6f", "network": {"id": "4aa7976c-5900-4be2-a5e2-8d641ac24be9", "bridge": "br-int", "label": "tempest-ServersTestJSON-119331503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea05f3fb4676466bb2a286f5a2fefb8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916", "external-id": 
"nsx-vlan-transportzone-269", "segmentation_id": 269, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07db4bb7-d0", "ovs_interfaceid": "07db4bb7-d084-4f5d-89b1-07062ff397f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1071.769747] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:69:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '00b1e0dc-9aea-4ee2-a76b-1f0c3eaba916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07db4bb7-d084-4f5d-89b1-07062ff397f6', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1071.777830] env[63538]: DEBUG oslo.service.loopingcall [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1071.779798] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.779967] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.780159] env[63538]: DEBUG nova.network.neutron [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1071.780348] env[63538]: DEBUG nova.objects.instance [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lazy-loading 'info_cache' on Instance uuid fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1071.782920] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1071.786799] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3804dc69-ded5-4775-869f-c312008923f1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.790346] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with 
opID=oslo.vmware-dc094386-7e96-4f18-8ccd-791f5e05c4c5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.817751] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56ef96c-aa05-42b1-8b8e-d3cdb6363727 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.821181] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1071.821181] env[63538]: value = "task-5101592" [ 1071.821181] env[63538]: _type = "Task" [ 1071.821181] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.857381] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1071.858445] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-648e49c3-9854-4864-933b-491dc8ba87e2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.863080] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d78d187-eb4d-406d-b771-a80df0bf3f78 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.870035] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101592, 'name': CreateVM_Task} progress is 15%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.877972] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1071.877972] env[63538]: value = "task-5101593" [ 1071.877972] env[63538]: _type = "Task" [ 1071.877972] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.879714] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a98333-b825-47e5-be5a-a31c984e2b02 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.906321] env[63538]: DEBUG nova.compute.provider_tree [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.908308] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101593, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.954607] env[63538]: DEBUG oslo_vmware.api [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101591, 'name': ReconfigVM_Task, 'duration_secs': 0.478207} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.955104] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Reconfigured VM instance instance-00000058 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1071.960065] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8263180-cf71-40e5-81ba-c6e9b6cadf7c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.981224] env[63538]: DEBUG oslo_vmware.api [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1071.981224] env[63538]: value = "task-5101594" [ 1071.981224] env[63538]: _type = "Task" [ 1071.981224] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.992374] env[63538]: DEBUG oslo_vmware.api [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101594, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.180176] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Releasing lock "refresh_cache-6257bf5c-8a1c-4204-9605-cc07491e14ea" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.180176] env[63538]: DEBUG nova.compute.manager [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Instance network_info: |[{"id": "8826b736-8295-4d09-8211-ccda578b133e", "address": "fa:16:3e:92:cf:b0", "network": {"id": "5a392b91-3e04-4ce1-bf51-bdad0c03becd", "bridge": "br-int", "label": "tempest-ServersTestJSON-180232194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9294e4310e484e338932e3514d079594", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8826b736-82", "ovs_interfaceid": "8826b736-8295-4d09-8211-ccda578b133e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1072.180512] env[63538]: DEBUG oslo_concurrency.lockutils [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] Acquired lock "refresh_cache-6257bf5c-8a1c-4204-9605-cc07491e14ea" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.180630] env[63538]: DEBUG nova.network.neutron [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Refreshing network info cache for port 8826b736-8295-4d09-8211-ccda578b133e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1072.181938] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:cf:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8826b736-8295-4d09-8211-ccda578b133e', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1072.192169] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Creating folder: Project 
(9294e4310e484e338932e3514d079594). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1072.194303] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2cfb541-9734-4830-9b8b-94cd8eb7d46f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.210757] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Created folder: Project (9294e4310e484e338932e3514d079594) in parent group-v992234. [ 1072.210906] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Creating folder: Instances. Parent ref: group-v992498. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1072.211139] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a100425-f644-40ea-b003-dc391da67ee2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.225026] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Created folder: Instances in parent group-v992498. [ 1072.225026] env[63538]: DEBUG oslo.service.loopingcall [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1072.225026] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1072.225026] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d01556b-c5ee-4be4-99d5-ec7c57b1324c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.244202] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1072.244202] env[63538]: value = "task-5101597" [ 1072.244202] env[63538]: _type = "Task" [ 1072.244202] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.255211] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101597, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.333510] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101592, 'name': CreateVM_Task, 'duration_secs': 0.351673} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.333731] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1072.336090] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.336090] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.336090] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1072.336090] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-702591ac-7fdd-4722-9a76-c14ea0f1a1e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.342825] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1072.342825] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cd37e5-96b1-90e7-325e-abd0fe214191" [ 1072.342825] env[63538]: _type = "Task" [ 1072.342825] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.353156] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cd37e5-96b1-90e7-325e-abd0fe214191, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.365267] env[63538]: DEBUG oslo_concurrency.lockutils [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.365616] env[63538]: DEBUG oslo_concurrency.lockutils [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.365735] env[63538]: DEBUG nova.network.neutron [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1072.397422] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101593, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.409164] env[63538]: DEBUG nova.scheduler.client.report [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1072.493372] env[63538]: DEBUG nova.network.neutron [-] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.494729] env[63538]: DEBUG oslo_vmware.api [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101594, 'name': ReconfigVM_Task, 'duration_secs': 0.170001} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.498550] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992489', 'volume_id': 'd09b8ebf-5a95-4a50-ba17-6eaf05d0b995', 'name': 'volume-d09b8ebf-5a95-4a50-ba17-6eaf05d0b995', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '431a67e6-b90d-4930-9a86-7c49d1022ddc', 'attached_at': '', 'detached_at': '', 'volume_id': 'd09b8ebf-5a95-4a50-ba17-6eaf05d0b995', 'serial': 'd09b8ebf-5a95-4a50-ba17-6eaf05d0b995'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1072.627465] env[63538]: DEBUG nova.network.neutron [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Updated VIF entry in instance network info cache for port 8826b736-8295-4d09-8211-ccda578b133e. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1072.627933] env[63538]: DEBUG nova.network.neutron [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Updating instance_info_cache with network_info: [{"id": "8826b736-8295-4d09-8211-ccda578b133e", "address": "fa:16:3e:92:cf:b0", "network": {"id": "5a392b91-3e04-4ce1-bf51-bdad0c03becd", "bridge": "br-int", "label": "tempest-ServersTestJSON-180232194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9294e4310e484e338932e3514d079594", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8826b736-82", "ovs_interfaceid": "8826b736-8295-4d09-8211-ccda578b133e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.754583] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101597, 'name': CreateVM_Task, 'duration_secs': 0.368527} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.754937] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1072.755499] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.859086] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cd37e5-96b1-90e7-325e-abd0fe214191, 'name': SearchDatastore_Task, 'duration_secs': 0.011988} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.859341] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.859587] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1072.859861] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.859970] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.860181] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1072.860472] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.861135] env[63538]: DEBUG 
oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1072.861135] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb567cf8-1b15-4023-84d9-d59530eead27 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.862996] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec91b276-16ec-4441-801e-72831a18c83f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.869162] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Waiting for the task: (returnval){ [ 1072.869162] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d0024d-3acd-a8db-314d-fc754ed9772c" [ 1072.869162] env[63538]: _type = "Task" [ 1072.869162] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.880809] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d0024d-3acd-a8db-314d-fc754ed9772c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.882256] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1072.882256] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1072.882961] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b534377f-b7cf-420b-85f0-d1eaa4afa491 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.890315] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1072.890315] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c2526a-aefb-c518-da7e-f7962f9ce49d" [ 1072.890315] env[63538]: _type = "Task" [ 1072.890315] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.898735] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101593, 'name': CreateSnapshot_Task, 'duration_secs': 0.713368} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.901753] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1072.901753] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e3d3fb-4499-4af1-9e3e-3ac963225a00 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.908444] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c2526a-aefb-c518-da7e-f7962f9ce49d, 'name': SearchDatastore_Task, 'duration_secs': 0.010042} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.909639] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3115ba00-cfef-43bc-9dae-abe63b8ad036 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.918263] env[63538]: DEBUG oslo_concurrency.lockutils [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.879s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.925229] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.744s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.925229] env[63538]: DEBUG nova.objects.instance [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63538) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1072.933028] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1072.933028] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52dae3f0-295c-c310-36c2-c7c23b5a976b" [ 1072.933028] env[63538]: _type = 
"Task" [ 1072.933028] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.943774] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52dae3f0-295c-c310-36c2-c7c23b5a976b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.954151] env[63538]: INFO nova.scheduler.client.report [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Deleted allocations for instance 209c5f46-9c63-4f55-bc75-bc2e4da989ac [ 1072.996170] env[63538]: INFO nova.compute.manager [-] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Took 1.38 seconds to deallocate network for instance. [ 1073.085081] env[63538]: DEBUG nova.objects.instance [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'flavor' on Instance uuid 431a67e6-b90d-4930-9a86-7c49d1022ddc {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.131203] env[63538]: DEBUG oslo_concurrency.lockutils [req-e0ee0d42-f61e-4399-a53e-b100b2b63425 req-a397271a-8819-4729-9b29-34c9c7a78617 service nova] Releasing lock "refresh_cache-6257bf5c-8a1c-4204-9605-cc07491e14ea" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.223480] env[63538]: DEBUG nova.network.neutron [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating instance_info_cache with network_info: [{"id": "c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78", "address": "fa:16:3e:85:66:7a", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5eca2fb-a7", "ovs_interfaceid": "c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.280062] env[63538]: DEBUG nova.compute.manager [req-39c6add4-871e-4903-a314-491361b29e4f req-9cd82843-2adf-4f37-807a-6803a4c9bae0 service nova] [instance: 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Received event network-vif-deleted-da8361d9-226a-4b83-967f-41c85a0d4920 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1073.301431] env[63538]: DEBUG nova.network.neutron [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance_info_cache with network_info: [{"id": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "address": "fa:16:3e:08:aa:47", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa679ee9b-3e", "ovs_interfaceid": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.378189] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "049518bd-d569-491a-8f79-6f0b78cf44b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.378611] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "049518bd-d569-491a-8f79-6f0b78cf44b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.383813] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d0024d-3acd-a8db-314d-fc754ed9772c, 'name': SearchDatastore_Task, 'duration_secs': 0.016538} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.384407] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.384843] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1073.384843] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.443572] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1073.443572] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5bf058c8-1af6-496b-b221-b9e87a8cb052 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.459704] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52dae3f0-295c-c310-36c2-c7c23b5a976b, 'name': SearchDatastore_Task, 'duration_secs': 0.014893} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.460966] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.461141] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 0c19d662-4ae0-4ec9-93b4-9bd45822ed92/0c19d662-4ae0-4ec9-93b4-9bd45822ed92.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1073.463623] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1073.463623] env[63538]: value = "task-5101598" [ 1073.463623] env[63538]: _type = "Task" [ 1073.463623] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.464105] env[63538]: DEBUG oslo_concurrency.lockutils [None req-72c83882-0c23-4499-9be6-97d0bc48d486 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "209c5f46-9c63-4f55-bc75-bc2e4da989ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.029s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.465036] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.465255] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1073.465495] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-360a3abb-e674-46c5-9492-829a9a6a04a6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.469980] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db6a6a3b-4957-4e7f-b1d8-027b7d833c19 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.484008] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101598, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.485742] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1073.485742] env[63538]: value = "task-5101599" [ 1073.485742] env[63538]: _type = "Task" [ 1073.485742] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.487012] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1073.487205] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1073.491118] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24df66cb-7cab-4206-b37c-85621a2c7b3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.503137] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.503408] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101599, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.504573] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Waiting for the task: (returnval){ [ 1073.504573] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5238021e-f8fc-91e0-b421-7d40cefe46e1" [ 1073.504573] env[63538]: _type = "Task" [ 1073.504573] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.514623] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5238021e-f8fc-91e0-b421-7d40cefe46e1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.726863] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "refresh_cache-fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.726863] env[63538]: DEBUG nova.objects.instance [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lazy-loading 'migration_context' on Instance uuid fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.805321] env[63538]: DEBUG oslo_concurrency.lockutils [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.886313] env[63538]: DEBUG nova.compute.manager [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1073.944100] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58c6fbe7-3812-4ef4-818d-a1ecbffa37eb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.945317] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.442s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.945563] env[63538]: DEBUG nova.objects.instance [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lazy-loading 'resources' on Instance uuid 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.980542] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101598, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.998233] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101599, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.019041] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5238021e-f8fc-91e0-b421-7d40cefe46e1, 'name': SearchDatastore_Task, 'duration_secs': 0.020687} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.019041] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ace1a43-91f5-4027-997d-d485a61e8a2e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.024989] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Waiting for the task: (returnval){ [ 1074.024989] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fddc3e-039f-243d-1766-ed7c1e11a615" [ 1074.024989] env[63538]: _type = "Task" [ 1074.024989] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.035182] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fddc3e-039f-243d-1766-ed7c1e11a615, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.094243] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4d79f2ed-1934-4538-acce-9698fcc7d74b tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.308s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.107591] env[63538]: DEBUG oslo_concurrency.lockutils [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.108345] env[63538]: DEBUG oslo_concurrency.lockutils [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.230095] env[63538]: DEBUG nova.objects.base [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1074.231091] env[63538]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fa19c5-8379-4930-8158-74992dd233fe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.253130] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-832d25b7-5279-4c4c-b550-fdc90b7afd71 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.260307] env[63538]: DEBUG oslo_vmware.api [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1074.260307] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5216f60f-bca0-7f27-fe87-b0e1e2e46a64" [ 1074.260307] env[63538]: _type = "Task" [ 1074.260307] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.272211] env[63538]: DEBUG oslo_vmware.api [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5216f60f-bca0-7f27-fe87-b0e1e2e46a64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.334914] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cbecf9-a4f9-4064-9224-8c8835636830 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.357620] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4a9f99-7901-49b5-8f97-f23d62162473 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.366278] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance '466be7db-79e4-49fd-aa3b-56fbe5c60457' progress to 83 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1074.414456] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.483292] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101598, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.503181] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101599, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.681315} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.504116] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 0c19d662-4ae0-4ec9-93b4-9bd45822ed92/0c19d662-4ae0-4ec9-93b4-9bd45822ed92.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1074.504405] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1074.504709] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83c7ddd9-8dcc-4516-993b-2e387c1c598c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.513771] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1074.513771] env[63538]: value = "task-5101600" [ 1074.513771] env[63538]: _type = "Task" [ 1074.513771] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.526306] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101600, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.540562] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fddc3e-039f-243d-1766-ed7c1e11a615, 'name': SearchDatastore_Task, 'duration_secs': 0.063912} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.542229] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1074.542635] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 6257bf5c-8a1c-4204-9605-cc07491e14ea/6257bf5c-8a1c-4204-9605-cc07491e14ea.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1074.543076] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-950d3322-5ae0-42d5-beb1-ffee4500bb3a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.554022] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Waiting for the task: (returnval){ [ 1074.554022] env[63538]: value = "task-5101601" [ 1074.554022] env[63538]: _type = "Task" [ 1074.554022] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.564636] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101601, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.612209] env[63538]: INFO nova.compute.manager [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Detaching volume 0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd [ 1074.669183] env[63538]: INFO nova.virt.block_device [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Attempting to driver detach volume 0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd from mountpoint /dev/sdc [ 1074.669463] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Volume detach. 
Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1074.669659] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992495', 'volume_id': '0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd', 'name': 'volume-0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '431a67e6-b90d-4930-9a86-7c49d1022ddc', 'attached_at': '', 'detached_at': '', 'volume_id': '0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd', 'serial': '0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1074.671290] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb3af0e-b071-497b-97cd-6cd9d87306bf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.703502] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f971a7cc-a573-459f-bd5c-5a36d57d5e04 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.708017] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1470a727-71e0-4b09-baf5-8a1b1e21a851 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.716875] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f08876-b22f-4924-9551-3eb316aa329a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.720873] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1f6cd3-9f56-42ec-8c06-2059fde33f7f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.772955] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9a1b9e-5c3e-4064-abd6-643764e202d0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.779675] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd740da-82e6-4823-bb28-306ef15223c2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.788645] env[63538]: DEBUG oslo_vmware.api [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5216f60f-bca0-7f27-fe87-b0e1e2e46a64, 'name': SearchDatastore_Task, 'duration_secs': 0.017569} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.801772] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.802364] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] The volume has not been displaced from its original location: [datastore1] volume-0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd/volume-0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd.vmdk. No consolidation needed. {{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1074.807849] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Reconfiguring VM instance instance-00000058 to detach disk 2002 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1074.809379] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15bce79-fce7-443d-9f09-30a9943850bd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.813783] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7aa9b9e-1d98-4354-ae2d-2c9dce972cb0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.838782] env[63538]: DEBUG nova.compute.provider_tree [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.841464] env[63538]: DEBUG oslo_vmware.api [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1074.841464] env[63538]: value = "task-5101602" [ 1074.841464] env[63538]: _type = "Task" [ 1074.841464] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.850861] env[63538]: DEBUG oslo_vmware.api [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101602, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.873728] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1074.874094] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b3b9327-3c7c-4064-bb19-a3ab3d6528f0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.881412] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1074.881412] env[63538]: value = "task-5101603" [ 1074.881412] env[63538]: _type = "Task" [ 1074.881412] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.893402] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101603, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.979717] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101598, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.024214] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101600, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.130496} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.024539] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1075.025374] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c9faf2-907e-4444-aaa6-3f9ea0222ef3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.056931] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 0c19d662-4ae0-4ec9-93b4-9bd45822ed92/0c19d662-4ae0-4ec9-93b4-9bd45822ed92.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1075.057652] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6daecdc-6e20-48a5-b0f3-8c6c46dfd996 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.085553] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101601, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.087952] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1075.087952] env[63538]: value = "task-5101604" [ 1075.087952] env[63538]: _type = "Task" [ 1075.087952] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.097777] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101604, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.344068] env[63538]: DEBUG nova.scheduler.client.report [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1075.367515] env[63538]: DEBUG oslo_vmware.api [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101602, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.397643] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101603, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.487462] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101598, 'name': CloneVM_Task} progress is 95%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.568494] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101601, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.878144} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.568779] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 6257bf5c-8a1c-4204-9605-cc07491e14ea/6257bf5c-8a1c-4204-9605-cc07491e14ea.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1075.569078] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1075.569300] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-82187dce-8f17-4a3b-9eda-3263d7145d40 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.581168] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Waiting for the task: (returnval){ [ 1075.581168] env[63538]: value = "task-5101605" [ 1075.581168] env[63538]: _type = "Task" [ 1075.581168] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.586342] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101605, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.602580] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101604, 'name': ReconfigVM_Task, 'duration_secs': 0.382904} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.602996] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 0c19d662-4ae0-4ec9-93b4-9bd45822ed92/0c19d662-4ae0-4ec9-93b4-9bd45822ed92.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1075.603769] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-687d2c19-b6bd-495f-a629-6ec01bbd7620 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.614894] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1075.614894] env[63538]: value = "task-5101606" [ 1075.614894] env[63538]: _type = "Task" [ 1075.614894] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.629367] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101606, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.861464] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.915s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.862534] env[63538]: DEBUG oslo_vmware.api [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101602, 'name': ReconfigVM_Task, 'duration_secs': 0.807728} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.864373] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.450s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.870458] env[63538]: INFO nova.compute.claims [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1075.873362] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Reconfigured VM instance instance-00000058 to detach disk 2002 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1075.882948] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8b2a4c9-9ad8-416f-8c30-71400a828f2a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.895063] env[63538]: INFO nova.scheduler.client.report [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleted allocations for instance 5ef5fe70-fed9-4b3d-9d43-f01cf628d9af [ 1075.904535] env[63538]: DEBUG oslo_vmware.api [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1075.904535] env[63538]: value = "task-5101607" [ 1075.904535] env[63538]: _type = "Task" [ 1075.904535] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.910997] env[63538]: DEBUG oslo_vmware.api [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101603, 'name': PowerOnVM_Task, 'duration_secs': 0.814508} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.912735] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1075.912735] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-682b4c3e-8cc4-499f-b7ae-ce589439b83a tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance '466be7db-79e4-49fd-aa3b-56fbe5c60457' progress to 100 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1075.920771] env[63538]: DEBUG oslo_vmware.api [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101607, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.985163] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101598, 'name': CloneVM_Task, 'duration_secs': 2.419401} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.986565] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Created linked-clone VM from snapshot [ 1075.987807] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e765c17-fa92-4c9b-aa78-0e658e33fb76 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.999697] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Uploading image 19a18d5a-92a4-4ea9-80ed-b2e672628ea8 {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1076.019634] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "42af31f3-a9d0-4fdd-99fa-442ebe915277" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.019846] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "42af31f3-a9d0-4fdd-99fa-442ebe915277" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.028964] env[63538]: DEBUG oslo_vmware.rw_handles [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1076.028964] env[63538]: value = "vm-992502" [ 1076.028964] env[63538]: _type = "VirtualMachine" [ 1076.028964] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1076.029483] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bde7bb8c-1191-4b8e-9fff-9be4f17abc20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.039133] env[63538]: DEBUG oslo_vmware.rw_handles [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lease: (returnval){ [ 1076.039133] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522d9c7c-633d-e010-c900-a2fbec58db21" [ 1076.039133] env[63538]: _type = "HttpNfcLease" [ 1076.039133] env[63538]: } obtained for exporting VM: (result){ [ 1076.039133] env[63538]: value = "vm-992502" [ 1076.039133] env[63538]: _type = "VirtualMachine" [ 1076.039133] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1076.039968] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the lease: (returnval){ [ 1076.039968] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522d9c7c-633d-e010-c900-a2fbec58db21" [ 1076.039968] env[63538]: _type = "HttpNfcLease" [ 1076.039968] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1076.048601] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1076.048601] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522d9c7c-633d-e010-c900-a2fbec58db21" [ 1076.048601] env[63538]: _type = "HttpNfcLease" [ 1076.048601] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1076.089375] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101605, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.144554} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.089684] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1076.090475] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c28528-82ec-45db-bf0c-9bd078da189b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.115049] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 6257bf5c-8a1c-4204-9605-cc07491e14ea/6257bf5c-8a1c-4204-9605-cc07491e14ea.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1076.115049] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67de7e46-27fe-4b34-97b6-858aeea7bde0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.138235] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101606, 'name': Rename_Task, 'duration_secs': 0.177179} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.139766] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1076.140462] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Waiting for the task: (returnval){ [ 1076.140462] env[63538]: value = "task-5101609" [ 1076.140462] env[63538]: _type = "Task" [ 1076.140462] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.140462] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf365e78-f9e8-4349-a2d9-e7597940fc4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.153108] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101609, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.153108] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1076.153108] env[63538]: value = "task-5101610" [ 1076.153108] env[63538]: _type = "Task" [ 1076.153108] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.162408] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101610, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.416133] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6ae87695-cfac-47db-a002-f632d17fbf84 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "5ef5fe70-fed9-4b3d-9d43-f01cf628d9af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.433s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.420916] env[63538]: DEBUG oslo_vmware.api [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101607, 'name': ReconfigVM_Task, 'duration_secs': 0.181833} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.423923] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992495', 'volume_id': '0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd', 'name': 'volume-0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '431a67e6-b90d-4930-9a86-7c49d1022ddc', 'attached_at': '', 'detached_at': '', 'volume_id': '0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd', 'serial': '0ba2d843-8ce4-4e02-a7d6-5ba18dcb0ddd'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1076.523864] env[63538]: DEBUG nova.compute.manager [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1076.555179] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1076.555179] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522d9c7c-633d-e010-c900-a2fbec58db21" [ 1076.555179] env[63538]: _type = "HttpNfcLease" [ 1076.555179] env[63538]: } is ready. 
{{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1076.555179] env[63538]: DEBUG oslo_vmware.rw_handles [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1076.555179] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522d9c7c-633d-e010-c900-a2fbec58db21" [ 1076.555179] env[63538]: _type = "HttpNfcLease" [ 1076.555179] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1076.557418] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98903bcf-8a8f-494d-a1e3-d59f8d62bb3a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.568933] env[63538]: DEBUG oslo_vmware.rw_handles [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fdc36b-50c0-7472-8677-3fac6002b3ca/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1076.569482] env[63538]: DEBUG oslo_vmware.rw_handles [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fdc36b-50c0-7472-8677-3fac6002b3ca/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1076.655296] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101609, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.664092] env[63538]: DEBUG oslo_vmware.api [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101610, 'name': PowerOnVM_Task, 'duration_secs': 0.510819} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.664386] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1076.664602] env[63538]: INFO nova.compute.manager [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Took 9.82 seconds to spawn the instance on the hypervisor. 
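The entries above trace the oslo.vmware HttpNfcLease export path: VirtualMachine.ExportVm is invoked, the returned lease is polled until it leaves "initializing" and reports "ready", and the lease info is then read to locate the disk-0.vmdk device URL that gets opened for reading. A minimal illustrative sketch of that flow (not taken from this log), assuming an already-authenticated oslo_vmware.api.VMwareAPISession named `session` and a VirtualMachine moref `vm_ref` — both names are assumptions for illustration:

    from oslo_vmware import vim_util

    def open_vmdk_export_url(session, vm_ref):
        # ExportVm hands back an HttpNfcLease; it must reach the "ready"
        # state before any of its device URLs can be used.
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        session.wait_for_lease_ready(lease)
        # Read HttpNfcLease.info and pick the URL of the exported disk
        # (the "Found VMDK URL ... disk-0.vmdk" entry above).
        lease_info = session.invoke_api(
            vim_util, 'get_object_property', session.vim, lease, 'info')
        for device_url in lease_info.deviceUrl:
            if device_url.disk:
                return lease, device_url.url
        raise RuntimeError('lease exposed no disk device URL')

While data is streamed from that URL, the caller periodically invokes HttpNfcLeaseProgress (as the entry at 1076.787355 below shows) and finally completes the lease, so vCenter does not time it out mid-transfer.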
[ 1076.664786] env[63538]: DEBUG nova.compute.manager [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1076.665606] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaad9d9f-23a6-40f8-9ee3-bfcd746054f6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.787355] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e3fa443f-1b96-4fdc-8d01-43020b5f2863 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.983455] env[63538]: DEBUG nova.objects.instance [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'flavor' on Instance uuid 431a67e6-b90d-4930-9a86-7c49d1022ddc {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.049557] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.153460] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101609, 'name': ReconfigVM_Task, 'duration_secs': 0.588459} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.156455] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 6257bf5c-8a1c-4204-9605-cc07491e14ea/6257bf5c-8a1c-4204-9605-cc07491e14ea.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1077.158127] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-68eb7ceb-aea4-4490-809c-40bfcd672a93 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.167820] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Waiting for the task: (returnval){ [ 1077.167820] env[63538]: value = "task-5101611" [ 1077.167820] env[63538]: _type = "Task" [ 1077.167820] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.181807] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101611, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.187737] env[63538]: INFO nova.compute.manager [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Took 16.67 seconds to build instance. [ 1077.221376] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115c840a-0eb0-4532-91f5-5e28624e90cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.232137] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4028b600-3665-421f-9a7b-4589702851d8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.275329] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a74ce13-b3df-4bbb-bd4f-d0f85bc580ba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.285089] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e9a0cd-15bf-47ba-9460-80311236d415 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.303487] env[63538]: DEBUG nova.compute.provider_tree [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1077.680504] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101611, 'name': Rename_Task, 'duration_secs': 0.191301} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.680890] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1077.681163] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-875da56c-ab3e-431f-b300-5bcf1fda624e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.691263] env[63538]: DEBUG oslo_concurrency.lockutils [None req-76f371a5-7c41-484f-b82b-ad9b32bae572 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.179s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.694271] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Waiting for the task: (returnval){ [ 1077.694271] env[63538]: value = "task-5101612" [ 1077.694271] env[63538]: _type = "Task" [ 1077.694271] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.711028] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101612, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.809447] env[63538]: DEBUG nova.scheduler.client.report [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1077.897282] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.897396] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.996954] env[63538]: DEBUG oslo_concurrency.lockutils [None req-29b0c0d8-3230-4a15-97f0-7b054fef85e6 tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.889s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.088506] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6fe53242-813a-43df-9012-414f0118f28f tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.089377] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6fe53242-813a-43df-9012-414f0118f28f tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.090265] env[63538]: DEBUG nova.compute.manager [None req-6fe53242-813a-43df-9012-414f0118f28f tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1078.090338] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42b8b80-0075-4ddf-a2ba-e29bcbe00776 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.099308] env[63538]: DEBUG nova.compute.manager [None req-6fe53242-813a-43df-9012-414f0118f28f tempest-ServersTestJSON-148199479 
tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63538) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1078.100893] env[63538]: DEBUG nova.objects.instance [None req-6fe53242-813a-43df-9012-414f0118f28f tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lazy-loading 'flavor' on Instance uuid 0c19d662-4ae0-4ec9-93b4-9bd45822ed92 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.206566] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101612, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.320758] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.456s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.321416] env[63538]: DEBUG nova.compute.manager [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1078.329603] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.524s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.405549] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.405976] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Starting heal instance info cache {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 1078.610808] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fe53242-813a-43df-9012-414f0118f28f tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1078.610919] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a308fabe-d8b1-40bb-ba85-bdf3fca0827e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.619115] env[63538]: DEBUG oslo_vmware.api [None req-6fe53242-813a-43df-9012-414f0118f28f tempest-ServersTestJSON-148199479 
tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1078.619115] env[63538]: value = "task-5101616" [ 1078.619115] env[63538]: _type = "Task" [ 1078.619115] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.630335] env[63538]: DEBUG oslo_vmware.api [None req-6fe53242-813a-43df-9012-414f0118f28f tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101616, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.676478] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.676967] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.677313] env[63538]: DEBUG nova.compute.manager [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Going to confirm migration 5 {{(pid=63538) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1078.689193] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "048573b4-26db-4a62-81e0-1bc1c3999d02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.689600] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.705971] env[63538]: DEBUG oslo_vmware.api [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101612, 'name': PowerOnVM_Task, 'duration_secs': 0.686095} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.706966] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1078.707253] env[63538]: INFO nova.compute.manager [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Took 9.39 seconds to spawn the instance on the hypervisor. [ 1078.707586] env[63538]: DEBUG nova.compute.manager [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1078.708790] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7617095b-fcb8-4b17-bb10-533f2bcefb9e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.832335] env[63538]: DEBUG nova.compute.utils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1078.836944] env[63538]: DEBUG nova.compute.manager [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1078.837248] env[63538]: DEBUG nova.network.neutron [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1078.896751] env[63538]: DEBUG nova.policy [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21f98d3c77454d809c0aa1ca5c7dc6d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '492427e54e1048f292dab2abdac71af5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1078.937429] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.937795] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.938295] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "431a67e6-b90d-4930-9a86-7c49d1022ddc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.938584] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.938774] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.941659] env[63538]: INFO nova.compute.manager [None 
req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Terminating instance [ 1078.944189] env[63538]: DEBUG nova.compute.manager [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1078.944458] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1078.945504] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc40647e-f1c8-485d-a6e5-a8f25a062549 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.960108] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1078.960744] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-495526a7-b0b1-49ba-9006-92c5be9afd75 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.969652] env[63538]: DEBUG oslo_vmware.api [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1078.969652] env[63538]: value = "task-5101617" [ 1078.969652] env[63538]: _type = "Task" [ 1078.969652] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.982355] env[63538]: DEBUG oslo_vmware.api [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101617, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.082874] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d0050d-91a2-453c-9ba4-692d9a45a08f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.093046] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8eeca6-749c-4a4f-871d-9d6fa13099cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.132842] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1e17b1-8d03-4c61-8231-a7f4bc1c9af2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.151809] env[63538]: DEBUG oslo_vmware.api [None req-6fe53242-813a-43df-9012-414f0118f28f tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101616, 'name': PowerOffVM_Task, 'duration_secs': 0.200976} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.151809] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad3dd8e-0427-4cc9-8a0f-0cddbc07e64d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.154182] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fe53242-813a-43df-9012-414f0118f28f tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1079.154498] env[63538]: DEBUG nova.compute.manager [None req-6fe53242-813a-43df-9012-414f0118f28f tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1079.155533] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b490a0e-dea1-480e-af82-44d2d6ded553 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.177329] env[63538]: DEBUG nova.compute.provider_tree [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1079.192530] env[63538]: DEBUG nova.compute.manager [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1079.236494] env[63538]: INFO nova.compute.manager [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Took 17.96 seconds to build instance. 
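The recurring "Task: {...} progress is N%" and "completed successfully" entries in this stretch (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task) all come from the same oslo.vmware polling loop: a *_Task call returns a Task moref immediately, and oslo_vmware.api.VMwareAPISession.wait_for_task polls it until it succeeds or raises. A minimal sketch of that pattern, again assuming an illustrative `session` and `vm_ref` rather than anything named in the log:

    def power_on(session, vm_ref):
        # PowerOnVM_Task returns at once with a Task moref; wait_for_task
        # polls it (the api.py:397/434/444 entries above) and returns the
        # final task info, raising if the task ends in an error state.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)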
[ 1079.249786] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.250076] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.250291] env[63538]: DEBUG nova.network.neutron [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1079.250552] env[63538]: DEBUG nova.objects.instance [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lazy-loading 'info_cache' on Instance uuid 466be7db-79e4-49fd-aa3b-56fbe5c60457 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.314553] env[63538]: DEBUG nova.network.neutron [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Successfully created port: da39877d-c305-4a70-8310-b2ad992f0cc7 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1079.342100] env[63538]: DEBUG nova.compute.manager [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1079.480977] env[63538]: DEBUG oslo_vmware.api [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101617, 'name': PowerOffVM_Task, 'duration_secs': 0.249058} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.481310] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1079.481490] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1079.481797] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-354d43d0-e48d-4551-ae3a-62ea0256e378 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.553813] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1079.554256] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1079.554553] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Deleting the datastore file [datastore1] 431a67e6-b90d-4930-9a86-7c49d1022ddc {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1079.554905] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-150a5624-7ac3-4def-afa1-5e747d82eb5c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.563638] env[63538]: DEBUG oslo_vmware.api [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for the task: (returnval){ [ 1079.563638] env[63538]: value = "task-5101619" [ 1079.563638] env[63538]: _type = "Task" [ 1079.563638] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.572481] env[63538]: DEBUG oslo_vmware.api [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101619, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.681892] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6fe53242-813a-43df-9012-414f0118f28f tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.593s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.683517] env[63538]: DEBUG nova.scheduler.client.report [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1079.717735] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.740852] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c07e0a81-f36c-4fe4-a538-370098fac312 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Lock "6257bf5c-8a1c-4204-9605-cc07491e14ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.482s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.080522] env[63538]: DEBUG oslo_vmware.api [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101619, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.267668] env[63538]: DEBUG nova.compute.manager [req-375d84d7-7d5d-466b-a535-0236b4b0e996 req-f00eaec3-336a-4a5a-bab2-812142826c4f service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Received event network-changed-8826b736-8295-4d09-8211-ccda578b133e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1080.267945] env[63538]: DEBUG nova.compute.manager [req-375d84d7-7d5d-466b-a535-0236b4b0e996 req-f00eaec3-336a-4a5a-bab2-812142826c4f service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Refreshing instance network info cache due to event network-changed-8826b736-8295-4d09-8211-ccda578b133e. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1080.268295] env[63538]: DEBUG oslo_concurrency.lockutils [req-375d84d7-7d5d-466b-a535-0236b4b0e996 req-f00eaec3-336a-4a5a-bab2-812142826c4f service nova] Acquiring lock "refresh_cache-6257bf5c-8a1c-4204-9605-cc07491e14ea" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.268471] env[63538]: DEBUG oslo_concurrency.lockutils [req-375d84d7-7d5d-466b-a535-0236b4b0e996 req-f00eaec3-336a-4a5a-bab2-812142826c4f service nova] Acquired lock "refresh_cache-6257bf5c-8a1c-4204-9605-cc07491e14ea" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.268646] env[63538]: DEBUG nova.network.neutron [req-375d84d7-7d5d-466b-a535-0236b4b0e996 req-f00eaec3-336a-4a5a-bab2-812142826c4f service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Refreshing network info cache for port 8826b736-8295-4d09-8211-ccda578b133e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1080.354252] env[63538]: DEBUG nova.compute.manager [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1080.382523] env[63538]: DEBUG nova.virt.hardware [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1080.382845] env[63538]: DEBUG nova.virt.hardware [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1080.382936] env[63538]: DEBUG nova.virt.hardware [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1080.383257] env[63538]: DEBUG nova.virt.hardware [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1080.383411] env[63538]: DEBUG nova.virt.hardware [None 
req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1080.383581] env[63538]: DEBUG nova.virt.hardware [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1080.383799] env[63538]: DEBUG nova.virt.hardware [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1080.383966] env[63538]: DEBUG nova.virt.hardware [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1080.384194] env[63538]: DEBUG nova.virt.hardware [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1080.384377] env[63538]: DEBUG nova.virt.hardware [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1080.384558] env[63538]: DEBUG nova.virt.hardware [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1080.385458] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f4bcfa-de5f-446f-9a49-d0431be5b34a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.396729] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83fab84a-0bc8-427d-904a-2956527a28b7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.416836] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Didn't find any instances for network info cache update. 
{{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10103}} [ 1080.417112] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.417243] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.417394] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.417652] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.417723] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.417841] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.417975] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63538) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10636}} [ 1080.418131] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.575412] env[63538]: DEBUG oslo_vmware.api [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Task: {'id': task-5101619, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.586441} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.575668] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1080.575857] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1080.576051] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1080.576469] env[63538]: INFO nova.compute.manager [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1080.576742] env[63538]: DEBUG oslo.service.loopingcall [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1080.576954] env[63538]: DEBUG nova.compute.manager [-] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1080.577069] env[63538]: DEBUG nova.network.neutron [-] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1080.604659] env[63538]: DEBUG nova.network.neutron [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance_info_cache with network_info: [{"id": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "address": "fa:16:3e:08:aa:47", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": 
"nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa679ee9b-3e", "ovs_interfaceid": "a679ee9b-3e51-4ce7-ab24-0792218d36ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.698309] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.373s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.704284] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.654s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.712155] env[63538]: INFO nova.compute.claims [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1080.921454] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.953882] env[63538]: DEBUG oslo_concurrency.lockutils [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.953882] env[63538]: DEBUG oslo_concurrency.lockutils [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.954037] env[63538]: DEBUG oslo_concurrency.lockutils [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.954284] env[63538]: DEBUG oslo_concurrency.lockutils [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.954484] env[63538]: DEBUG oslo_concurrency.lockutils [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.956914] env[63538]: INFO nova.compute.manager [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Terminating instance [ 1080.959127] env[63538]: DEBUG nova.compute.manager [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1080.959359] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1080.960399] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e325a4d-a92d-4d5c-bc09-a9d2f1ddb354 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.969404] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1080.969768] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bae448bf-bcb8-426a-91ab-b498cdaa4ef9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.027762] env[63538]: DEBUG nova.compute.manager [req-783db3cc-a49c-4529-9214-eeb21881b0bc req-2ffe8e85-ead5-41a4-b122-83051a7294b3 service nova] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Received event network-vif-plugged-da39877d-c305-4a70-8310-b2ad992f0cc7 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1081.027762] env[63538]: DEBUG oslo_concurrency.lockutils [req-783db3cc-a49c-4529-9214-eeb21881b0bc req-2ffe8e85-ead5-41a4-b122-83051a7294b3 service nova] Acquiring lock "049518bd-d569-491a-8f79-6f0b78cf44b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.027762] env[63538]: DEBUG oslo_concurrency.lockutils [req-783db3cc-a49c-4529-9214-eeb21881b0bc req-2ffe8e85-ead5-41a4-b122-83051a7294b3 service nova] Lock "049518bd-d569-491a-8f79-6f0b78cf44b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.027968] env[63538]: DEBUG oslo_concurrency.lockutils [req-783db3cc-a49c-4529-9214-eeb21881b0bc req-2ffe8e85-ead5-41a4-b122-83051a7294b3 service nova] Lock "049518bd-d569-491a-8f79-6f0b78cf44b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.028176] env[63538]: DEBUG nova.compute.manager [req-783db3cc-a49c-4529-9214-eeb21881b0bc req-2ffe8e85-ead5-41a4-b122-83051a7294b3 service nova] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] No waiting events found dispatching network-vif-plugged-da39877d-c305-4a70-8310-b2ad992f0cc7 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1081.028393] env[63538]: WARNING nova.compute.manager [req-783db3cc-a49c-4529-9214-eeb21881b0bc req-2ffe8e85-ead5-41a4-b122-83051a7294b3 service nova] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Received unexpected event network-vif-plugged-da39877d-c305-4a70-8310-b2ad992f0cc7 for instance with vm_state building and task_state spawning. [ 1081.045027] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1081.045027] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1081.045027] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleting the datastore file [datastore1] 0c19d662-4ae0-4ec9-93b4-9bd45822ed92 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1081.045027] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0dfb2e4-2afa-4512-aed7-93c9b93ed792 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.053807] env[63538]: DEBUG oslo_vmware.api [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1081.053807] env[63538]: value = "task-5101621" [ 1081.053807] env[63538]: _type = "Task" [ 1081.053807] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.065304] env[63538]: DEBUG oslo_vmware.api [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101621, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.107779] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-466be7db-79e4-49fd-aa3b-56fbe5c60457" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.108108] env[63538]: DEBUG nova.objects.instance [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lazy-loading 'migration_context' on Instance uuid 466be7db-79e4-49fd-aa3b-56fbe5c60457 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.217144] env[63538]: DEBUG nova.network.neutron [req-375d84d7-7d5d-466b-a535-0236b4b0e996 req-f00eaec3-336a-4a5a-bab2-812142826c4f service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Updated VIF entry in instance network info cache for port 8826b736-8295-4d09-8211-ccda578b133e. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1081.217821] env[63538]: DEBUG nova.network.neutron [req-375d84d7-7d5d-466b-a535-0236b4b0e996 req-f00eaec3-336a-4a5a-bab2-812142826c4f service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Updating instance_info_cache with network_info: [{"id": "8826b736-8295-4d09-8211-ccda578b133e", "address": "fa:16:3e:92:cf:b0", "network": {"id": "5a392b91-3e04-4ce1-bf51-bdad0c03becd", "bridge": "br-int", "label": "tempest-ServersTestJSON-180232194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9294e4310e484e338932e3514d079594", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8826b736-82", "ovs_interfaceid": "8826b736-8295-4d09-8211-ccda578b133e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.229595] env[63538]: DEBUG nova.network.neutron [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Successfully updated port: da39877d-c305-4a70-8310-b2ad992f0cc7 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1081.297273] env[63538]: INFO nova.scheduler.client.report [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted allocation for migration 36b9ab62-693e-4437-b195-831dd146d846 [ 1081.567317] env[63538]: DEBUG oslo_vmware.api [None 
req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101621, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.347814} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.568467] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1081.568861] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1081.569300] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1081.569654] env[63538]: INFO nova.compute.manager [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1081.570075] env[63538]: DEBUG oslo.service.loopingcall [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1081.570547] env[63538]: DEBUG nova.compute.manager [-] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1081.571340] env[63538]: DEBUG nova.network.neutron [-] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1081.614113] env[63538]: DEBUG nova.objects.base [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Object Instance<466be7db-79e4-49fd-aa3b-56fbe5c60457> lazy-loaded attributes: info_cache,migration_context {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1081.615694] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c482bf-3112-4f53-b0fc-db6a607b2cce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.640920] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-173e76fe-2ccf-4612-90ec-e58047742569 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.655513] env[63538]: DEBUG oslo_vmware.api [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1081.655513] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527d86ff-7380-9951-0752-7a7f3e862403" [ 1081.655513] env[63538]: _type = "Task" [ 1081.655513] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.670205] env[63538]: DEBUG oslo_vmware.api [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527d86ff-7380-9951-0752-7a7f3e862403, 'name': SearchDatastore_Task, 'duration_secs': 0.012399} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.670631] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.708583] env[63538]: DEBUG nova.network.neutron [-] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.724651] env[63538]: DEBUG oslo_concurrency.lockutils [req-375d84d7-7d5d-466b-a535-0236b4b0e996 req-f00eaec3-336a-4a5a-bab2-812142826c4f service nova] Releasing lock "refresh_cache-6257bf5c-8a1c-4204-9605-cc07491e14ea" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.737832] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.737832] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.737832] env[63538]: DEBUG nova.network.neutron [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1081.804768] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d619ae16-ac68-43a6-84d3-2ab7604401c1 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 10.582s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.984683] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2be570-f9e5-4a5c-8ed5-5d3ac4f1238c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.993405] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84cd914-9e75-4a87-894c-e45955a8523e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.028787] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a51b297-cbcd-4efc-a12e-8592d12ccdcf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.038931] env[63538]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9695d8aa-4492-4df8-8b5b-159ddb43492a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.055581] env[63538]: DEBUG nova.compute.provider_tree [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.149830] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "0df15328-aebd-44c5-9c78-ee05f188ad95" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.150135] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.211056] env[63538]: INFO nova.compute.manager [-] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Took 1.63 seconds to deallocate network for instance. [ 1082.269025] env[63538]: DEBUG nova.network.neutron [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1082.297362] env[63538]: DEBUG nova.compute.manager [req-5d75b58a-1548-4b8c-9ddd-0b173d44d91c req-1d8c3913-703e-4d7c-8e2a-681c68d02131 service nova] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Received event network-vif-deleted-c9561ca9-cb68-4037-807e-9f89307cb528 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1082.410348] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.411068] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.411068] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.411068] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.411402] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.415306] env[63538]: INFO nova.compute.manager [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Terminating instance [ 1082.418693] env[63538]: DEBUG nova.compute.manager [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1082.418933] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1082.419812] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9659ab1e-bc67-4037-af97-7a4831ac090d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.428557] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1082.428824] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c6310cf-0d50-4b04-ae58-07ed789a3cf5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.435095] env[63538]: DEBUG nova.network.neutron [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance_info_cache with network_info: [{"id": "da39877d-c305-4a70-8310-b2ad992f0cc7", "address": "fa:16:3e:e7:40:0a", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda39877d-c3", "ovs_interfaceid": "da39877d-c305-4a70-8310-b2ad992f0cc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.437468] env[63538]: DEBUG oslo_vmware.api [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1082.437468] env[63538]: value = "task-5101623" [ 1082.437468] env[63538]: _type = "Task" [ 1082.437468] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.442309] env[63538]: DEBUG nova.network.neutron [-] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.446887] env[63538]: DEBUG oslo_vmware.api [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101623, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.558940] env[63538]: DEBUG nova.scheduler.client.report [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1082.653979] env[63538]: INFO nova.compute.manager [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Detaching volume dc4092b5-f968-4e95-b286-e9901b2a6c30 [ 1082.691591] env[63538]: INFO nova.virt.block_device [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Attempting to driver detach volume dc4092b5-f968-4e95-b286-e9901b2a6c30 from mountpoint /dev/sdb [ 1082.691898] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Volume detach. 
Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1082.692178] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992467', 'volume_id': 'dc4092b5-f968-4e95-b286-e9901b2a6c30', 'name': 'volume-dc4092b5-f968-4e95-b286-e9901b2a6c30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '0df15328-aebd-44c5-9c78-ee05f188ad95', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc4092b5-f968-4e95-b286-e9901b2a6c30', 'serial': 'dc4092b5-f968-4e95-b286-e9901b2a6c30'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1082.693437] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc57ecbf-b9dd-44ec-af60-d9e61cc948ab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.725390] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.726648] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c286e4e5-1504-479b-99dd-719f957091f2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.736430] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efacdb13-7e56-426a-a7f6-683c25e52044 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.765143] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2970e2-fa65-4b4a-930b-97398602c7ae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.788282] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] The volume has not been displaced from its original location: [datastore1] volume-dc4092b5-f968-4e95-b286-e9901b2a6c30/volume-dc4092b5-f968-4e95-b286-e9901b2a6c30.vmdk. No consolidation needed. 
{{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1082.797399] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Reconfiguring VM instance instance-00000047 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1082.797972] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db129ede-8d3a-44b7-b114-28733c7df817 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.827962] env[63538]: DEBUG oslo_vmware.api [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1082.827962] env[63538]: value = "task-5101624" [ 1082.827962] env[63538]: _type = "Task" [ 1082.827962] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.838361] env[63538]: DEBUG oslo_vmware.api [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101624, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.939219] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.939592] env[63538]: DEBUG nova.compute.manager [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Instance network_info: |[{"id": "da39877d-c305-4a70-8310-b2ad992f0cc7", "address": "fa:16:3e:e7:40:0a", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda39877d-c3", "ovs_interfaceid": "da39877d-c305-4a70-8310-b2ad992f0cc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1082.940214] 
env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:40:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32463b6d-4569-4755-8a29-873a028690a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da39877d-c305-4a70-8310-b2ad992f0cc7', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1082.948462] env[63538]: DEBUG oslo.service.loopingcall [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1082.952245] env[63538]: INFO nova.compute.manager [-] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Took 1.38 seconds to deallocate network for instance. [ 1082.952556] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1082.954569] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d13bc25-90cf-4d2a-a1f0-64ff56d5faeb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.978108] env[63538]: DEBUG oslo_vmware.api [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101623, 'name': PowerOffVM_Task, 'duration_secs': 0.513528} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.978702] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1082.978902] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1082.979216] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17a6d385-1265-4bdf-9aa7-5dfe65071bbc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.984408] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1082.984408] env[63538]: value = "task-5101625" [ 1082.984408] env[63538]: _type = "Task" [ 1082.984408] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.994100] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101625, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.004974] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "17350ce4-555b-4f00-9a75-de32a4453141" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.004974] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "17350ce4-555b-4f00-9a75-de32a4453141" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.066884] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.364s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.067314] env[63538]: DEBUG nova.compute.manager [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1083.070675] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.353s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.072831] env[63538]: INFO nova.compute.claims [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1083.085801] env[63538]: DEBUG nova.compute.manager [req-4283aa33-7ade-47e6-bd13-24af5532d0e5 req-3ed34cdb-13a3-47e7-a06c-6828adca8754 service nova] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Received event network-changed-da39877d-c305-4a70-8310-b2ad992f0cc7 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1083.086053] env[63538]: DEBUG nova.compute.manager [req-4283aa33-7ade-47e6-bd13-24af5532d0e5 req-3ed34cdb-13a3-47e7-a06c-6828adca8754 service nova] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Refreshing instance network info cache due to event network-changed-da39877d-c305-4a70-8310-b2ad992f0cc7. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1083.086492] env[63538]: DEBUG oslo_concurrency.lockutils [req-4283aa33-7ade-47e6-bd13-24af5532d0e5 req-3ed34cdb-13a3-47e7-a06c-6828adca8754 service nova] Acquiring lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.087870] env[63538]: DEBUG oslo_concurrency.lockutils [req-4283aa33-7ade-47e6-bd13-24af5532d0e5 req-3ed34cdb-13a3-47e7-a06c-6828adca8754 service nova] Acquired lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.087870] env[63538]: DEBUG nova.network.neutron [req-4283aa33-7ade-47e6-bd13-24af5532d0e5 req-3ed34cdb-13a3-47e7-a06c-6828adca8754 service nova] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Refreshing network info cache for port da39877d-c305-4a70-8310-b2ad992f0cc7 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1083.098541] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1083.098541] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1083.098890] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleting the datastore file [datastore2] fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1083.099248] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97c86126-e7cb-493e-abc1-ac804fccbc18 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.111410] env[63538]: DEBUG oslo_vmware.api [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1083.111410] env[63538]: value = "task-5101627" [ 1083.111410] env[63538]: _type = "Task" [ 1083.111410] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.123382] env[63538]: DEBUG oslo_vmware.api [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101627, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.341796] env[63538]: DEBUG oslo_vmware.api [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101624, 'name': ReconfigVM_Task, 'duration_secs': 0.354201} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.342185] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Reconfigured VM instance instance-00000047 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1083.348675] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6353162-915c-4f26-bad1-14e64cf7b99a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.365303] env[63538]: DEBUG oslo_vmware.api [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1083.365303] env[63538]: value = "task-5101628" [ 1083.365303] env[63538]: _type = "Task" [ 1083.365303] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.376019] env[63538]: DEBUG oslo_vmware.api [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101628, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.480202] env[63538]: DEBUG oslo_concurrency.lockutils [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.495772] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101625, 'name': CreateVM_Task, 'duration_secs': 0.391995} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.495949] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1083.496752] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.496927] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.497324] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1083.497669] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-332bac40-4e47-4a7e-822d-bd7253e514fe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.503362] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1083.503362] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52224ca9-d995-4aa4-9d74-9cbebb5b5518" [ 1083.503362] env[63538]: _type = "Task" [ 1083.503362] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.507032] env[63538]: DEBUG nova.compute.manager [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1083.515041] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52224ca9-d995-4aa4-9d74-9cbebb5b5518, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.578461] env[63538]: DEBUG nova.compute.utils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1083.581752] env[63538]: DEBUG nova.compute.manager [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1083.582047] env[63538]: DEBUG nova.network.neutron [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1083.624189] env[63538]: DEBUG oslo_vmware.api [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101627, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.362918} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.624707] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1083.625017] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1083.625403] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1083.625618] env[63538]: INFO nova.compute.manager [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1083.625879] env[63538]: DEBUG oslo.service.loopingcall [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1083.626785] env[63538]: DEBUG nova.compute.manager [-] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1083.626785] env[63538]: DEBUG nova.network.neutron [-] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1083.661602] env[63538]: DEBUG nova.policy [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ebe77c0e32ce4a32b290ba5088e107f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7063c42297c24f2baf7271fa25dec927', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1083.877692] env[63538]: DEBUG oslo_vmware.api [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101628, 'name': ReconfigVM_Task, 'duration_secs': 0.198666} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.877954] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992467', 'volume_id': 'dc4092b5-f968-4e95-b286-e9901b2a6c30', 'name': 'volume-dc4092b5-f968-4e95-b286-e9901b2a6c30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '0df15328-aebd-44c5-9c78-ee05f188ad95', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc4092b5-f968-4e95-b286-e9901b2a6c30', 'serial': 'dc4092b5-f968-4e95-b286-e9901b2a6c30'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1083.935921] env[63538]: DEBUG nova.network.neutron [req-4283aa33-7ade-47e6-bd13-24af5532d0e5 req-3ed34cdb-13a3-47e7-a06c-6828adca8754 service nova] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updated VIF entry in instance network info cache for port da39877d-c305-4a70-8310-b2ad992f0cc7. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1083.936387] env[63538]: DEBUG nova.network.neutron [req-4283aa33-7ade-47e6-bd13-24af5532d0e5 req-3ed34cdb-13a3-47e7-a06c-6828adca8754 service nova] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance_info_cache with network_info: [{"id": "da39877d-c305-4a70-8310-b2ad992f0cc7", "address": "fa:16:3e:e7:40:0a", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda39877d-c3", "ovs_interfaceid": "da39877d-c305-4a70-8310-b2ad992f0cc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.018742] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52224ca9-d995-4aa4-9d74-9cbebb5b5518, 'name': SearchDatastore_Task, 'duration_secs': 0.024377} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.021365] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.021365] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.021551] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.021699] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.021889] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.022419] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa8f36b5-2533-4592-8d86-7302d049dcec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.032951] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.033169] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1084.034107] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f7407ae-0fdc-45b0-bad0-9a1c50d7a8b4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.037873] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.040055] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1084.040055] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52023d44-2db7-4aba-0483-41f4399693d5" [ 1084.040055] env[63538]: _type = "Task" [ 1084.040055] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.055249] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52023d44-2db7-4aba-0483-41f4399693d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.083086] env[63538]: DEBUG nova.network.neutron [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Successfully created port: dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1084.086763] env[63538]: DEBUG nova.compute.manager [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1084.328726] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512550b4-f798-4f56-a462-d4be308a3f8f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.341469] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baebfedf-da41-4f19-be87-5650e4683015 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.380328] env[63538]: DEBUG nova.network.neutron [-] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.385291] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfdb563-5f26-4e05-9382-bd5f7d757e00 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.396079] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae84bd60-dbda-4647-b8d3-6259e14214f8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.405963] env[63538]: DEBUG nova.compute.manager [req-ed86cd4c-a692-4499-8cda-717eda08f21c req-d17e8bf6-129e-47d8-a9fb-c14e88cc35f7 service nova] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Received event network-vif-deleted-c5eca2fb-a7a7-43a0-a2a0-5016e8c11f78 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1084.415107] env[63538]: DEBUG nova.compute.provider_tree [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.426673] env[63538]: DEBUG nova.objects.instance [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lazy-loading 'flavor' on Instance uuid 0df15328-aebd-44c5-9c78-ee05f188ad95 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.439082] env[63538]: DEBUG oslo_concurrency.lockutils [req-4283aa33-7ade-47e6-bd13-24af5532d0e5 req-3ed34cdb-13a3-47e7-a06c-6828adca8754 service nova] Releasing lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.439362] env[63538]: DEBUG nova.compute.manager [req-4283aa33-7ade-47e6-bd13-24af5532d0e5 req-3ed34cdb-13a3-47e7-a06c-6828adca8754 service nova] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Received event network-vif-deleted-07db4bb7-d084-4f5d-89b1-07062ff397f6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1084.551681] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52023d44-2db7-4aba-0483-41f4399693d5, 'name': SearchDatastore_Task, 'duration_secs': 
0.014181} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.552526] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5023d24f-bb2c-4b8a-a0b8-aa24bab3c6a0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.558653] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1084.558653] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5251de23-9213-9d40-5b62-4d40d979b0d2" [ 1084.558653] env[63538]: _type = "Task" [ 1084.558653] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.566812] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5251de23-9213-9d40-5b62-4d40d979b0d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.885708] env[63538]: INFO nova.compute.manager [-] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Took 1.26 seconds to deallocate network for instance. [ 1084.918585] env[63538]: DEBUG nova.scheduler.client.report [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1085.070880] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5251de23-9213-9d40-5b62-4d40d979b0d2, 'name': SearchDatastore_Task, 'duration_secs': 0.023672} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.071281] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.071455] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 049518bd-d569-491a-8f79-6f0b78cf44b2/049518bd-d569-491a-8f79-6f0b78cf44b2.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1085.072102] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9bd9f9a0-dd03-46df-a24b-d1206e85db24 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.079728] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1085.079728] env[63538]: value = "task-5101630" [ 1085.079728] env[63538]: _type = "Task" [ 1085.079728] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.090685] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101630, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.101308] env[63538]: DEBUG nova.compute.manager [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1085.128099] env[63538]: DEBUG nova.virt.hardware [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1085.128389] env[63538]: DEBUG nova.virt.hardware [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1085.128560] env[63538]: DEBUG nova.virt.hardware [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1085.128749] env[63538]: DEBUG nova.virt.hardware [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1085.128902] env[63538]: DEBUG nova.virt.hardware [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1085.129082] env[63538]: DEBUG nova.virt.hardware [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1085.129325] env[63538]: DEBUG nova.virt.hardware [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1085.129493] env[63538]: DEBUG nova.virt.hardware [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1085.129669] 
env[63538]: DEBUG nova.virt.hardware [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1085.129905] env[63538]: DEBUG nova.virt.hardware [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1085.130035] env[63538]: DEBUG nova.virt.hardware [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1085.130930] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124eec3e-50e4-4ccb-b8f5-93c4306e356a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.139749] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46d72e0-9abd-44e0-ab79-3f0c9d685d5d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.393484] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.423874] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.353s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.424669] env[63538]: DEBUG nova.compute.manager [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1085.428257] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.507s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.428506] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.428695] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63538) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1085.429062] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.759s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.431469] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd151daa-c061-47b1-9e08-4549c52f1e65 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.434907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a5a8fd2-10ab-4256-b160-a68f4a9c9de9 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.285s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.442953] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4befcc21-a484-4661-a1ef-738e5d67976b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.462797] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa0966c-ca27-4552-a49e-5890154b82e4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.474517] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8413df5f-d402-401d-b5f1-4da86fe16888 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.507178] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179256MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=63538) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1085.507178] env[63538]: DEBUG oslo_concurrency.lockutils [None 
req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.595391] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101630, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.690087] env[63538]: DEBUG nova.compute.manager [req-f4dc50b6-7265-432a-8a9e-d0d0c49b51af req-1862fd1b-c89b-42d7-ad31-b952252b797f service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Received event network-vif-plugged-dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1085.690340] env[63538]: DEBUG oslo_concurrency.lockutils [req-f4dc50b6-7265-432a-8a9e-d0d0c49b51af req-1862fd1b-c89b-42d7-ad31-b952252b797f service nova] Acquiring lock "42af31f3-a9d0-4fdd-99fa-442ebe915277-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.690559] env[63538]: DEBUG oslo_concurrency.lockutils [req-f4dc50b6-7265-432a-8a9e-d0d0c49b51af req-1862fd1b-c89b-42d7-ad31-b952252b797f service nova] Lock "42af31f3-a9d0-4fdd-99fa-442ebe915277-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.690734] env[63538]: DEBUG oslo_concurrency.lockutils [req-f4dc50b6-7265-432a-8a9e-d0d0c49b51af req-1862fd1b-c89b-42d7-ad31-b952252b797f service nova] Lock "42af31f3-a9d0-4fdd-99fa-442ebe915277-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.690985] env[63538]: DEBUG nova.compute.manager [req-f4dc50b6-7265-432a-8a9e-d0d0c49b51af req-1862fd1b-c89b-42d7-ad31-b952252b797f service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] No waiting events found dispatching network-vif-plugged-dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1085.694233] env[63538]: WARNING nova.compute.manager [req-f4dc50b6-7265-432a-8a9e-d0d0c49b51af req-1862fd1b-c89b-42d7-ad31-b952252b797f service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Received unexpected event network-vif-plugged-dfaa4640-ae2a-444b-aa92-e24dd9eca692 for instance with vm_state building and task_state spawning. 
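
[editor's note] The WARNING just above is the compute manager's external-event plumbing at work: Neutron reports network-vif-plugged for port dfaa4640-ae2a-444b-aa92-e24dd9eca692 while the instance is still in vm_state building / task_state spawning, and because no code path has registered a waiter for that event yet ("No waiting events found dispatching ..."), the dispatcher drops it with a warning instead of blocking. The snippet below is a minimal, hypothetical sketch of that register-then-dispatch pattern, not Nova's actual implementation; EventBoard, expect and dispatch are illustrative names invented for this sketch.

    # Illustrative sketch only: a tiny analogue of matching external
    # notifications against registered waiters, with unmatched ones
    # logged as warnings (as in the log entries above).
    import logging
    import threading

    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
    LOG = logging.getLogger("event-board")


    class EventBoard:
        """Tracks events a spawning thread has said it will wait for."""

        def __init__(self):
            self._lock = threading.Lock()
            # (instance_uuid, event_name) -> threading.Event
            self._waiters = {}

        def expect(self, instance_uuid, event_name):
            """Register interest before starting the operation that triggers it."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def dispatch(self, instance_uuid, event_name):
            """Deliver an external notification; warn if nobody is waiting."""
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                LOG.warning("Received unexpected event %s for instance %s",
                            event_name, instance_uuid)
            else:
                waiter.set()


    if __name__ == "__main__":
        board = EventBoard()
        uuid = "42af31f3-a9d0-4fdd-99fa-442ebe915277"
        # Notification arrives before anyone registered a waiter -> warning,
        # mirroring the WARNING entry in the log above.
        board.dispatch(uuid, "network-vif-plugged")
        # Normal path: register first, then the notification releases the waiter.
        waiter = board.expect(uuid, "network-vif-plugged")
        board.dispatch(uuid, "network-vif-plugged")
        print("waiter released:", waiter.wait(timeout=1))

Treating an unmatched notification as a warning rather than an error keeps the spawn path non-blocking; in the log the build simply continues and the port update ("Successfully updated port: dfaa4640-...") arrives in the next entries.
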
[ 1085.795314] env[63538]: DEBUG nova.network.neutron [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Successfully updated port: dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1085.933031] env[63538]: DEBUG nova.compute.utils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1085.934398] env[63538]: DEBUG nova.compute.manager [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1085.934577] env[63538]: DEBUG nova.network.neutron [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1085.984764] env[63538]: DEBUG nova.policy [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f315670d336b49d6a732297656ce515a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df090f9a727d4cf4a0f466e27928bdc6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1086.093074] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101630, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.632402} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.093437] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 049518bd-d569-491a-8f79-6f0b78cf44b2/049518bd-d569-491a-8f79-6f0b78cf44b2.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1086.093718] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1086.094163] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13dea97f-9b4a-4a7e-a140-83851e7aa83f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.102197] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1086.102197] env[63538]: value = "task-5101632" [ 1086.102197] env[63538]: _type = "Task" [ 1086.102197] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.117936] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101632, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.281785] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d628a3-8277-4d5d-8c6e-efb332dee50e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.291194] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63b4335-b939-4d9d-888e-8b2838d074c9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.297959] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1086.297959] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.297959] env[63538]: DEBUG nova.network.neutron [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1086.329918] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66957eb4-3e95-4666-a8fa-5b2c55f3e520 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.343102] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f226d0f-1bec-40a0-ab85-ee0350475335 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.359176] env[63538]: DEBUG nova.compute.provider_tree [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1086.378215] env[63538]: DEBUG nova.network.neutron [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1086.438463] env[63538]: DEBUG nova.compute.manager [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1086.465627] env[63538]: DEBUG nova.network.neutron [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Successfully created port: 3de39e87-f579-458e-a713-326821c5daa5 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1086.521723] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "0df15328-aebd-44c5-9c78-ee05f188ad95" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.522063] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.522325] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "0df15328-aebd-44c5-9c78-ee05f188ad95-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.522567] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.522775] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.525301] env[63538]: INFO nova.compute.manager [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Terminating instance [ 1086.528990] env[63538]: DEBUG nova.compute.manager [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1086.529269] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1086.530249] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb614aa-c12e-4450-b98f-9e73155dc739 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.539850] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1086.541550] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93797ccf-f8c3-4509-8bd5-114285c8efd7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.551946] env[63538]: DEBUG oslo_vmware.api [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1086.551946] env[63538]: value = "task-5101633" [ 1086.551946] env[63538]: _type = "Task" [ 1086.551946] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.565074] env[63538]: DEBUG oslo_vmware.api [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101633, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.614188] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101632, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.172347} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.614516] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1086.615515] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9fa85b-e871-4c1d-b4a3-6f6ad69517e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.642824] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 049518bd-d569-491a-8f79-6f0b78cf44b2/049518bd-d569-491a-8f79-6f0b78cf44b2.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1086.643205] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b132f33-ad7b-456e-a1ea-17eee7a40e06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.664467] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1086.664467] env[63538]: value = "task-5101634" [ 1086.664467] env[63538]: _type = "Task" [ 1086.664467] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.673564] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101634, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.701260] env[63538]: DEBUG nova.network.neutron [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updating instance_info_cache with network_info: [{"id": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "address": "fa:16:3e:f0:24:40", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfaa4640-ae", "ovs_interfaceid": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.865022] env[63538]: DEBUG nova.scheduler.client.report [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1087.063130] env[63538]: DEBUG oslo_vmware.api [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101633, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.151092] env[63538]: DEBUG oslo_vmware.rw_handles [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fdc36b-50c0-7472-8677-3fac6002b3ca/disk-0.vmdk. 
{{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1087.152048] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46e9b9f-7b10-4286-956b-f823fcd8f5ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.158555] env[63538]: DEBUG oslo_vmware.rw_handles [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fdc36b-50c0-7472-8677-3fac6002b3ca/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1087.158754] env[63538]: ERROR oslo_vmware.rw_handles [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fdc36b-50c0-7472-8677-3fac6002b3ca/disk-0.vmdk due to incomplete transfer. [ 1087.158984] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d26cfb52-00e6-4634-a32c-ca549014ef2d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.170616] env[63538]: DEBUG oslo_vmware.rw_handles [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fdc36b-50c0-7472-8677-3fac6002b3ca/disk-0.vmdk. {{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1087.170827] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Uploaded image 19a18d5a-92a4-4ea9-80ed-b2e672628ea8 to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1087.173063] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1087.173683] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2a2a9b0a-de04-420f-a99f-8f157837369d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.178293] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101634, 'name': ReconfigVM_Task, 'duration_secs': 0.273836} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.178952] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 049518bd-d569-491a-8f79-6f0b78cf44b2/049518bd-d569-491a-8f79-6f0b78cf44b2.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1087.180161] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75fe304b-7f44-4035-9de8-e94e8948ff86 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.181686] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1087.181686] env[63538]: value = "task-5101635" [ 1087.181686] env[63538]: _type = "Task" [ 1087.181686] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.186126] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1087.186126] env[63538]: value = "task-5101636" [ 1087.186126] env[63538]: _type = "Task" [ 1087.186126] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.192457] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101635, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.197324] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101636, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.203049] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.203437] env[63538]: DEBUG nova.compute.manager [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Instance network_info: |[{"id": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "address": "fa:16:3e:f0:24:40", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfaa4640-ae", "ovs_interfaceid": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1087.203616] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:24:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '412cde91-d0f0-4193-b36b-d8b9d17384c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dfaa4640-ae2a-444b-aa92-e24dd9eca692', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1087.211041] env[63538]: DEBUG oslo.service.loopingcall [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1087.211251] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1087.211481] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec9bb1d9-8cc2-4a15-be4f-866981da75c3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.240498] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1087.240498] env[63538]: value = "task-5101637" [ 1087.240498] env[63538]: _type = "Task" [ 1087.240498] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.254269] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101637, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.448665] env[63538]: DEBUG nova.compute.manager [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1087.476623] env[63538]: DEBUG nova.virt.hardware [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1087.477179] env[63538]: DEBUG nova.virt.hardware [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1087.477519] env[63538]: DEBUG nova.virt.hardware [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1087.477831] env[63538]: DEBUG nova.virt.hardware [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1087.477998] env[63538]: DEBUG 
nova.virt.hardware [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1087.478173] env[63538]: DEBUG nova.virt.hardware [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1087.478406] env[63538]: DEBUG nova.virt.hardware [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1087.478613] env[63538]: DEBUG nova.virt.hardware [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1087.478811] env[63538]: DEBUG nova.virt.hardware [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1087.479020] env[63538]: DEBUG nova.virt.hardware [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1087.479230] env[63538]: DEBUG nova.virt.hardware [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1087.480337] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae3406b-0f83-4349-9f9b-ecdcf83465e9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.490062] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b667a9-0adc-4d8a-85f4-485b6fbb2c9f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.563761] env[63538]: DEBUG oslo_vmware.api [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101633, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.698880] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101636, 'name': Rename_Task, 'duration_secs': 0.180614} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.699330] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101635, 'name': Destroy_Task, 'duration_secs': 0.481734} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.699439] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1087.699681] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Destroyed the VM [ 1087.699892] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1087.700148] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e13360d2-de87-41fb-a988-8d0c3e336356 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.701810] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0269cd27-b5fc-43d4-af46-79dc2f772f1e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.710071] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1087.710071] env[63538]: value = "task-5101639" [ 1087.710071] env[63538]: _type = "Task" [ 1087.710071] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.711127] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1087.711127] env[63538]: value = "task-5101638" [ 1087.711127] env[63538]: _type = "Task" [ 1087.711127] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.720201] env[63538]: DEBUG nova.compute.manager [req-2ad4a82c-cd16-44b6-9d95-423e44ac85f8 req-61f08fbd-af88-4789-a4eb-cec23cb96157 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Received event network-changed-dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1087.720388] env[63538]: DEBUG nova.compute.manager [req-2ad4a82c-cd16-44b6-9d95-423e44ac85f8 req-61f08fbd-af88-4789-a4eb-cec23cb96157 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Refreshing instance network info cache due to event network-changed-dfaa4640-ae2a-444b-aa92-e24dd9eca692. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1087.720608] env[63538]: DEBUG oslo_concurrency.lockutils [req-2ad4a82c-cd16-44b6-9d95-423e44ac85f8 req-61f08fbd-af88-4789-a4eb-cec23cb96157 service nova] Acquiring lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1087.720773] env[63538]: DEBUG oslo_concurrency.lockutils [req-2ad4a82c-cd16-44b6-9d95-423e44ac85f8 req-61f08fbd-af88-4789-a4eb-cec23cb96157 service nova] Acquired lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.720944] env[63538]: DEBUG nova.network.neutron [req-2ad4a82c-cd16-44b6-9d95-423e44ac85f8 req-61f08fbd-af88-4789-a4eb-cec23cb96157 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Refreshing network info cache for port dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1087.726627] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101639, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.731748] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101638, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.752248] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101637, 'name': CreateVM_Task, 'duration_secs': 0.44755} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.752441] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1087.753186] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1087.753362] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.753705] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1087.753977] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76f331df-f392-4412-8889-0af7f9516efb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.760104] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1087.760104] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d85f9d-d338-f3a5-56ae-255f80633b5d" [ 1087.760104] env[63538]: _type = "Task" [ 1087.760104] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.771772] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d85f9d-d338-f3a5-56ae-255f80633b5d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.877138] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.448s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.880426] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.155s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.880713] env[63538]: DEBUG nova.objects.instance [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lazy-loading 'resources' on Instance uuid 431a67e6-b90d-4930-9a86-7c49d1022ddc {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.064146] env[63538]: DEBUG oslo_vmware.api [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101633, 'name': PowerOffVM_Task, 'duration_secs': 1.062172} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.064545] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1088.064829] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1088.065237] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed93637c-d16e-44e8-a6cc-ff098f5a60db {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.168864] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1088.169288] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1088.169558] env[63538]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleting the datastore file [datastore2] 0df15328-aebd-44c5-9c78-ee05f188ad95 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1088.169862] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa18a866-4911-48de-8ea3-b2b76ad7b408 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.179791] env[63538]: DEBUG oslo_vmware.api [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1088.179791] env[63538]: value = "task-5101642" [ 1088.179791] env[63538]: _type = "Task" [ 1088.179791] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.193726] env[63538]: DEBUG oslo_vmware.api [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101642, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.225767] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101638, 'name': RemoveSnapshot_Task} progress is 17%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.232499] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101639, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.272597] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d85f9d-d338-f3a5-56ae-255f80633b5d, 'name': SearchDatastore_Task, 'duration_secs': 0.011988} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.273091] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1088.273759] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1088.273759] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.273968] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.274217] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1088.274511] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25acddaf-7455-4269-b365-7fa54300c68e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.301785] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1088.302094] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1088.302993] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f64e5b34-09bb-4890-8fe5-2649360b4502 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.310267] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1088.310267] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52618773-e976-3849-b4be-1f763d891c5b" [ 1088.310267] env[63538]: _type = "Task" [ 1088.310267] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.319424] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52618773-e976-3849-b4be-1f763d891c5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.450432] env[63538]: INFO nova.scheduler.client.report [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleted allocation for migration 84c74eba-0557-41e9-a3f5-396efa857140 [ 1088.602571] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d236432c-a927-464c-9d32-0c42a7c79084 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.610854] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a9f6e8-8db5-448d-bb21-194c7490a0ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.646767] env[63538]: DEBUG nova.network.neutron [req-2ad4a82c-cd16-44b6-9d95-423e44ac85f8 req-61f08fbd-af88-4789-a4eb-cec23cb96157 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updated VIF entry in instance network info cache for port dfaa4640-ae2a-444b-aa92-e24dd9eca692. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1088.647133] env[63538]: DEBUG nova.network.neutron [req-2ad4a82c-cd16-44b6-9d95-423e44ac85f8 req-61f08fbd-af88-4789-a4eb-cec23cb96157 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updating instance_info_cache with network_info: [{"id": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "address": "fa:16:3e:f0:24:40", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfaa4640-ae", "ovs_interfaceid": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.649928] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25de3e72-99ba-42a5-9f2f-e6548a91dc9f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.658814] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cde10c-7f6a-48ea-b998-a3c331501b89 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.674320] env[63538]: DEBUG nova.compute.provider_tree [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.677359] env[63538]: DEBUG nova.network.neutron [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Successfully updated port: 3de39e87-f579-458e-a713-326821c5daa5 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1088.691604] env[63538]: DEBUG oslo_vmware.api [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204998} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.691720] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1088.691885] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1088.692081] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1088.692264] env[63538]: INFO nova.compute.manager [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Took 2.16 seconds to destroy the instance on the hypervisor. [ 1088.692515] env[63538]: DEBUG oslo.service.loopingcall [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1088.692708] env[63538]: DEBUG nova.compute.manager [-] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1088.692805] env[63538]: DEBUG nova.network.neutron [-] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1088.724427] env[63538]: DEBUG oslo_vmware.api [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101638, 'name': RemoveSnapshot_Task, 'duration_secs': 0.638877} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.727613] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1088.727886] env[63538]: INFO nova.compute.manager [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Took 17.43 seconds to snapshot the instance on the hypervisor. 
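The records around this point all follow the same oslo.vmware task-handling pattern: a vSphere task (Rename_Task, Destroy_Task, CreateVM_Task, RemoveSnapshot_Task, PowerOnVM_Task, DeleteDatastoreFile_Task, ...) is kicked off through the SOAP service, and the caller blocks in wait_for_task() while _poll_task emits the "progress is N%" and "completed successfully" lines seen above. The snippet below is a minimal, hypothetical sketch of driving that same flow directly with the oslo.vmware library; the vCenter host, credentials and the way the VM reference is obtained are placeholder assumptions, not values taken from this log.

# Minimal sketch of the oslo.vmware task-polling pattern visible in the
# surrounding log records (wait_for_task -> _poll_task -> "completed successfully").
# Host, credentials and the VM lookup are illustrative assumptions only.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vc.example.org',        # hypothetical vCenter host
    'administrator',         # hypothetical user
    'secret',                # hypothetical password
    api_retry_count=10,
    task_poll_interval=0.5)  # how often the library polls task progress

# Retrieve some VirtualMachine objects via the property collector and take
# the first one, purely for illustration.
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100)
vm_ref = result.objects[0].obj

# Start a vSphere task and block until it finishes; progress lines such as
# "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is 66%" are
# produced by the library's internal polling loop while this call waits.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task_ref)
print(task_info.state)       # 'success' once the task has completed

Nova's vmwareapi driver wraps the same primitives (its own wait_for_task and _call_method helpers), which is why every disk copy, rename, power-on and snapshot removal in this log appears as a task id plus a short series of progress polls.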
[ 1088.730257] env[63538]: DEBUG oslo_vmware.api [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101639, 'name': PowerOnVM_Task, 'duration_secs': 0.745325} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.732363] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1088.732574] env[63538]: INFO nova.compute.manager [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Took 8.38 seconds to spawn the instance on the hypervisor. [ 1088.732777] env[63538]: DEBUG nova.compute.manager [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1088.733754] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a178d948-c6cf-47db-b435-26d71beb4fe2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.821915] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52618773-e976-3849-b4be-1f763d891c5b, 'name': SearchDatastore_Task, 'duration_secs': 0.023014} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.822799] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-066cbbae-ad99-408a-8a26-29d7275123cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.829268] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1088.829268] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52073210-9db3-b2c4-444d-37c659a1a5db" [ 1088.829268] env[63538]: _type = "Task" [ 1088.829268] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.838338] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52073210-9db3-b2c4-444d-37c659a1a5db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.962024] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d197ac13-37d6-457f-bfdf-3105076b041d tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 10.284s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.153537] env[63538]: DEBUG oslo_concurrency.lockutils [req-2ad4a82c-cd16-44b6-9d95-423e44ac85f8 req-61f08fbd-af88-4789-a4eb-cec23cb96157 service nova] Releasing lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.178344] env[63538]: DEBUG nova.scheduler.client.report [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1089.182900] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "refresh_cache-048573b4-26db-4a62-81e0-1bc1c3999d02" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.183193] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "refresh_cache-048573b4-26db-4a62-81e0-1bc1c3999d02" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.183412] env[63538]: DEBUG nova.network.neutron [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1089.253166] env[63538]: INFO nova.compute.manager [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Took 14.86 seconds to build instance. 
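The paired "Lock ... acquired by ... :: waited Ns" and "Lock ... released by ... :: held Ns" lines throughout this section (for example the "compute_resources" lock and the per-instance "refresh_cache-<uuid>" locks) are emitted by oslo.concurrency's lockutils, which logs the wait time and hold time around each guarded block. Below is a minimal sketch of the same pattern; the lock names mirror the ones in the log, but the functions and their bodies are illustrative stand-ins rather than Nova's actual code.

# Sketch of the oslo_concurrency.lockutils usage that produces the
# "acquired by ... waited" / "released ... held" DEBUG lines above.
# The functions are placeholders, not Nova's real code paths.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage_example(instance_uuid):
    # Runs with the named lock held; lockutils logs how long the caller
    # waited for the lock and how long it was held.
    return instance_uuid

def refresh_network_cache_example(instance_uuid):
    # The per-instance cache refreshes use an explicit lock name built from
    # the instance UUID, taken as a context manager.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # refresh the instance network info cache here

update_usage_example('431a67e6-b90d-4930-9a86-7c49d1022ddc')
refresh_network_cache_example('42af31f3-a9d0-4fdd-99fa-442ebe915277')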
[ 1089.295318] env[63538]: DEBUG nova.compute.manager [None req-346e4a21-aad3-4f86-8fcb-e9842e737028 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Found 2 images (rotation: 2) {{(pid=63538) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1089.342451] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52073210-9db3-b2c4-444d-37c659a1a5db, 'name': SearchDatastore_Task, 'duration_secs': 0.010068} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.342807] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.343139] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 42af31f3-a9d0-4fdd-99fa-442ebe915277/42af31f3-a9d0-4fdd-99fa-442ebe915277.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1089.343421] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8c21841-1bdc-424d-a894-fd179ca390e3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.359029] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1089.359029] env[63538]: value = "task-5101643" [ 1089.359029] env[63538]: _type = "Task" [ 1089.359029] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.365845] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101643, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.686120] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.806s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.691613] env[63538]: DEBUG oslo_concurrency.lockutils [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.211s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.691777] env[63538]: DEBUG nova.objects.instance [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lazy-loading 'resources' on Instance uuid 0c19d662-4ae0-4ec9-93b4-9bd45822ed92 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.714007] env[63538]: INFO nova.scheduler.client.report [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Deleted allocations for instance 431a67e6-b90d-4930-9a86-7c49d1022ddc [ 1089.757778] env[63538]: DEBUG oslo_concurrency.lockutils [None req-67715192-f64b-407b-9c24-a615bf721ac4 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "049518bd-d569-491a-8f79-6f0b78cf44b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.379s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.759739] env[63538]: DEBUG nova.network.neutron [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1089.769893] env[63538]: DEBUG nova.network.neutron [-] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.868565] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101643, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.964975] env[63538]: DEBUG nova.compute.manager [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Received event network-vif-plugged-3de39e87-f579-458e-a713-326821c5daa5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1089.965253] env[63538]: DEBUG oslo_concurrency.lockutils [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] Acquiring lock "048573b4-26db-4a62-81e0-1bc1c3999d02-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.965517] env[63538]: DEBUG oslo_concurrency.lockutils [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.965706] env[63538]: DEBUG oslo_concurrency.lockutils [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.966643] env[63538]: DEBUG nova.compute.manager [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] No waiting events found dispatching network-vif-plugged-3de39e87-f579-458e-a713-326821c5daa5 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1089.966643] env[63538]: WARNING nova.compute.manager [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Received unexpected event network-vif-plugged-3de39e87-f579-458e-a713-326821c5daa5 for instance with vm_state building and task_state spawning. [ 1089.966643] env[63538]: DEBUG nova.compute.manager [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Received event network-changed-3de39e87-f579-458e-a713-326821c5daa5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1089.966643] env[63538]: DEBUG nova.compute.manager [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Refreshing instance network info cache due to event network-changed-3de39e87-f579-458e-a713-326821c5daa5. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1089.966643] env[63538]: DEBUG oslo_concurrency.lockutils [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] Acquiring lock "refresh_cache-048573b4-26db-4a62-81e0-1bc1c3999d02" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.980799] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.980912] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.981182] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "466be7db-79e4-49fd-aa3b-56fbe5c60457-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.981679] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.981679] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.984593] env[63538]: INFO nova.compute.manager [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Terminating instance [ 1089.986930] env[63538]: DEBUG nova.compute.manager [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1089.987201] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1089.988812] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9bcdfc-d846-48fe-b51f-5324d48c73e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.998673] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1089.998673] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cffbc1c5-fcab-4a95-803a-144f664600a7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.007520] env[63538]: DEBUG oslo_vmware.api [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1090.007520] env[63538]: value = "task-5101644" [ 1090.007520] env[63538]: _type = "Task" [ 1090.007520] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.011785] env[63538]: DEBUG nova.network.neutron [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Updating instance_info_cache with network_info: [{"id": "3de39e87-f579-458e-a713-326821c5daa5", "address": "fa:16:3e:f4:54:5a", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3de39e87-f5", "ovs_interfaceid": "3de39e87-f579-458e-a713-326821c5daa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.021289] env[63538]: DEBUG oslo_vmware.api [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 
tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101644, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.224576] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c68760e5-d5ff-4248-83f0-ac74f902336d tempest-AttachVolumeTestJSON-1982835647 tempest-AttachVolumeTestJSON-1982835647-project-member] Lock "431a67e6-b90d-4930-9a86-7c49d1022ddc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.287s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.273386] env[63538]: INFO nova.compute.manager [-] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Took 1.58 seconds to deallocate network for instance. [ 1090.372873] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101643, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.953395} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.373230] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 42af31f3-a9d0-4fdd-99fa-442ebe915277/42af31f3-a9d0-4fdd-99fa-442ebe915277.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1090.373696] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1090.373941] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6dbb2399-4d1b-404e-bf99-c03c6bde2fe9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.384703] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1090.384703] env[63538]: value = "task-5101645" [ 1090.384703] env[63538]: _type = "Task" [ 1090.384703] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.390524] env[63538]: DEBUG nova.compute.manager [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Stashing vm_state: active {{(pid=63538) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 1090.402156] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101645, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.437561] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215335bd-2f5a-4ef1-98a2-609f4f1546e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.446219] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53c527b-a234-46ae-994c-f29b60892f54 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.481353] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c22c2c7-55af-4486-a5a6-3cc89ddf64db {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.493341] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c87541-7f91-4150-9bf7-a4a3d449a649 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.511231] env[63538]: DEBUG nova.compute.provider_tree [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.516159] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "refresh_cache-048573b4-26db-4a62-81e0-1bc1c3999d02" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.516612] env[63538]: DEBUG nova.compute.manager [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Instance network_info: |[{"id": "3de39e87-f579-458e-a713-326821c5daa5", "address": "fa:16:3e:f4:54:5a", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3de39e87-f5", "ovs_interfaceid": "3de39e87-f579-458e-a713-326821c5daa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1090.517306] env[63538]: DEBUG oslo_concurrency.lockutils 
[req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] Acquired lock "refresh_cache-048573b4-26db-4a62-81e0-1bc1c3999d02" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.517397] env[63538]: DEBUG nova.network.neutron [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Refreshing network info cache for port 3de39e87-f579-458e-a713-326821c5daa5 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1090.519391] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:54:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3de39e87-f579-458e-a713-326821c5daa5', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1090.526737] env[63538]: DEBUG oslo.service.loopingcall [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1090.528751] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1090.529016] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69296752-87e5-44c7-939f-3d6bbc0d6153 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.548150] env[63538]: DEBUG oslo_vmware.api [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101644, 'name': PowerOffVM_Task, 'duration_secs': 0.361924} completed successfully. 
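(Editor's aside: the network_info blobs cached above, and the VIF info list built from them, follow a stable nesting: each entry carries the port id, MAC address, and a network whose subnets hold the fixed IPs. A minimal plain-Python sketch of pulling those fields out; the literal below is an abbreviated copy of the entry for port 3de39e87-f579-458e-a713-326821c5daa5 from the records above, nothing more:)

    # Abbreviated copy of one network_info entry from the cache-update records.
    vif = {
        "id": "3de39e87-f579-458e-a713-326821c5daa5",
        "address": "fa:16:3e:f4:54:5a",
        "type": "ovs",
        "devname": "tap3de39e87-f5",
        "network": {
            "id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30",
            "bridge": "br-int",
            "subnets": [
                {
                    "cidr": "192.168.128.0/28",
                    "ips": [{"address": "192.168.128.12", "type": "fixed"}],
                }
            ],
        },
    }

    def summarize_vif(vif: dict) -> str:
        """Return 'port-id mac ip1,ip2' for one network_info entry."""
        ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
        ]
        return f"{vif['id']} {vif['address']} {','.join(ips)}"

    print(summarize_vif(vif))
    # 3de39e87-f579-458e-a713-326821c5daa5 fa:16:3e:f4:54:5a 192.168.128.12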
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.549705] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1090.549705] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1090.549705] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0fb5ef4-2197-41c6-8b03-a4ebf8f04687 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.554476] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1090.554476] env[63538]: value = "task-5101647" [ 1090.554476] env[63538]: _type = "Task" [ 1090.554476] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.563397] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101647, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.624020] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1090.624020] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1090.624020] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleting the datastore file [datastore2] 466be7db-79e4-49fd-aa3b-56fbe5c60457 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.624020] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76192c97-ef42-4576-9f02-9a57579b41df {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.638059] env[63538]: DEBUG oslo_vmware.api [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1090.638059] env[63538]: value = "task-5101649" [ 1090.638059] env[63538]: _type = "Task" [ 1090.638059] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.649756] env[63538]: DEBUG oslo_vmware.api [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101649, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.782360] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.896485] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101645, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.141523} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.897111] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1090.900666] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956b1226-d96e-4017-81b4-84ec6518f82a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.923550] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 42af31f3-a9d0-4fdd-99fa-442ebe915277/42af31f3-a9d0-4fdd-99fa-442ebe915277.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1090.924694] env[63538]: DEBUG oslo_concurrency.lockutils [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.924944] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af078ca1-1c89-4f7e-8622-a4363bc15c8e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.945289] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1090.945289] env[63538]: value = "task-5101650" [ 1090.945289] env[63538]: _type = "Task" [ 1090.945289] env[63538]: } to complete. 
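(Editor's aside: most records in this stretch are the same wait loop: a task moref is returned as {'value': 'task-…', '_type': 'Task'}, its progress is polled, and the wait ends when the task reports success together with its duration_secs. A minimal sketch of that polling pattern; get_task_state is a hypothetical stand-in for whatever call fetches the task's state, not an oslo.vmware API:)

    import time

    # Hypothetical accessor: returns (state, progress) for a task moref such as
    # "task-5101650". In the real driver this information comes from vCenter.
    def get_task_state(task_ref: str) -> tuple[str, int]:
        raise NotImplementedError("stand-in for the vCenter task query")

    def wait_for_task(task_ref: str, interval: float = 0.5) -> float:
        """Poll a task until it succeeds; return elapsed wall-clock seconds."""
        started = time.monotonic()
        while True:
            state, progress = get_task_state(task_ref)
            if state == "success":
                return time.monotonic() - started  # the 'duration_secs' in the log
            if state == "error":
                raise RuntimeError(f"{task_ref} failed")
            # the "progress is N%" records correspond to this branch
            time.sleep(interval)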
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.954580] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101650, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.017473] env[63538]: DEBUG nova.scheduler.client.report [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1091.065810] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101647, 'name': CreateVM_Task, 'duration_secs': 0.339183} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.065982] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1091.066753] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.066935] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.068986] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1091.069698] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3232ff27-daac-42f6-8f4c-036cbe9c4cf6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.076486] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1091.076486] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c85cc9-02c3-3ff7-21a3-7232109e0c1b" [ 
1091.076486] env[63538]: _type = "Task" [ 1091.076486] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.087181] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c85cc9-02c3-3ff7-21a3-7232109e0c1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.124591] env[63538]: DEBUG nova.compute.manager [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1091.125504] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01cf3246-9ac9-4996-be2a-8682abc2ff14 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.148090] env[63538]: DEBUG oslo_vmware.api [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101649, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154178} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.148373] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1091.148588] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1091.148837] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1091.149047] env[63538]: INFO nova.compute.manager [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1091.149296] env[63538]: DEBUG oslo.service.loopingcall [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1091.149491] env[63538]: DEBUG nova.compute.manager [-] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1091.149585] env[63538]: DEBUG nova.network.neutron [-] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1091.381668] env[63538]: DEBUG nova.network.neutron [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Updated VIF entry in instance network info cache for port 3de39e87-f579-458e-a713-326821c5daa5. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1091.382217] env[63538]: DEBUG nova.network.neutron [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Updating instance_info_cache with network_info: [{"id": "3de39e87-f579-458e-a713-326821c5daa5", "address": "fa:16:3e:f4:54:5a", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3de39e87-f5", "ovs_interfaceid": "3de39e87-f579-458e-a713-326821c5daa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.455953] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101650, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.522520] env[63538]: DEBUG oslo_concurrency.lockutils [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.831s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.525921] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.487s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.528353] env[63538]: INFO nova.compute.claims [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1091.551348] env[63538]: INFO nova.scheduler.client.report [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted allocations for instance 0c19d662-4ae0-4ec9-93b4-9bd45822ed92 [ 1091.589213] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c85cc9-02c3-3ff7-21a3-7232109e0c1b, 'name': SearchDatastore_Task, 'duration_secs': 0.014689} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.589988] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.589988] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1091.590212] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.590372] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.590586] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1091.590903] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d14d250-6033-4275-8132-4029895eaa91 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.603674] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1091.604179] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1091.605437] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4321a137-839c-46bf-bcc3-6390f26451c6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.618971] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1091.618971] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a325a5-d460-95a4-9840-eebb376d5c94" [ 1091.618971] env[63538]: _type = "Task" [ 1091.618971] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.628989] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a325a5-d460-95a4-9840-eebb376d5c94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.638798] env[63538]: INFO nova.compute.manager [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] instance snapshotting [ 1091.639488] env[63538]: DEBUG nova.objects.instance [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'flavor' on Instance uuid 3d80dc17-e330-4575-8e12-e06d8e76274a {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.886879] env[63538]: DEBUG oslo_concurrency.lockutils [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] Releasing lock "refresh_cache-048573b4-26db-4a62-81e0-1bc1c3999d02" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.887203] env[63538]: DEBUG nova.compute.manager [req-512fc1fd-c134-4dca-a387-05666eb2d302 req-7cd148a3-dcc8-4442-b111-e1619c46b95c service nova] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Received event network-vif-deleted-8a332a90-393f-41ae-a924-4959c06e6207 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1091.957561] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101650, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.992444] env[63538]: DEBUG nova.compute.manager [req-e7f18fe6-181e-42a6-8e3e-27cfd6b4325f req-94545198-d0af-4f84-b362-161ea2f40012 service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Received event network-vif-deleted-a679ee9b-3e51-4ce7-ab24-0792218d36ba {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1091.992564] env[63538]: INFO nova.compute.manager [req-e7f18fe6-181e-42a6-8e3e-27cfd6b4325f req-94545198-d0af-4f84-b362-161ea2f40012 service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Neutron deleted interface a679ee9b-3e51-4ce7-ab24-0792218d36ba; detaching it from the instance and deleting it from the info cache [ 1091.992807] env[63538]: DEBUG nova.network.neutron [req-e7f18fe6-181e-42a6-8e3e-27cfd6b4325f req-94545198-d0af-4f84-b362-161ea2f40012 service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.060601] env[63538]: DEBUG oslo_concurrency.lockutils [None req-15887bf7-8876-43a7-856e-c0fc01f6d02d tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "0c19d662-4ae0-4ec9-93b4-9bd45822ed92" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.107s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.131332] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a325a5-d460-95a4-9840-eebb376d5c94, 'name': SearchDatastore_Task, 'duration_secs': 0.085183} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.131873] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10dd2dae-943a-40e3-af80-ae5d37d7ef4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.139029] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1092.139029] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cbbc0d-e4f9-3e07-6744-25b6124ae662" [ 1092.139029] env[63538]: _type = "Task" [ 1092.139029] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.152068] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52cbbc0d-e4f9-3e07-6744-25b6124ae662, 'name': SearchDatastore_Task, 'duration_secs': 0.010554} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.152726] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.152935] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 048573b4-26db-4a62-81e0-1bc1c3999d02/048573b4-26db-4a62-81e0-1bc1c3999d02.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1092.153730] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af5c3c8-e5f5-46c9-8909-22ad7bee55c9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.156693] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ddb7ee6-d6ae-4ebb-bf12-a04d054dedd0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.180247] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88799a83-157d-4d17-a1a2-28f571ed1f6f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.183841] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1092.183841] env[63538]: value = "task-5101652" [ 1092.183841] env[63538]: _type = "Task" [ 1092.183841] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.199371] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.255359] env[63538]: DEBUG nova.network.neutron [-] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.456721] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101650, 'name': ReconfigVM_Task, 'duration_secs': 1.095515} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.457059] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 42af31f3-a9d0-4fdd-99fa-442ebe915277/42af31f3-a9d0-4fdd-99fa-442ebe915277.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1092.457743] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc278d8b-d727-4342-8672-c2492fff31c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.465212] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1092.465212] env[63538]: value = "task-5101653" [ 1092.465212] env[63538]: _type = "Task" [ 1092.465212] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.474302] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101653, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.495925] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a96ade90-bc62-464e-bb70-1ff19e0457c6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.508971] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a767e944-9c82-4150-ab15-764c0a851dce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.550350] env[63538]: DEBUG nova.compute.manager [req-e7f18fe6-181e-42a6-8e3e-27cfd6b4325f req-94545198-d0af-4f84-b362-161ea2f40012 service nova] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Detach interface failed, port_id=a679ee9b-3e51-4ce7-ab24-0792218d36ba, reason: Instance 466be7db-79e4-49fd-aa3b-56fbe5c60457 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1092.695341] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1092.699658] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4fc6fc0c-8413-4b99-b3ac-3e0997826177 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.701910] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101652, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.710871] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1092.710871] env[63538]: value = "task-5101654" [ 1092.710871] env[63538]: _type = "Task" [ 1092.710871] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.724177] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101654, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.734372] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "ede967c0-ec3a-4f26-8290-0ee36890cd75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.734704] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "ede967c0-ec3a-4f26-8290-0ee36890cd75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.734973] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "ede967c0-ec3a-4f26-8290-0ee36890cd75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.735193] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "ede967c0-ec3a-4f26-8290-0ee36890cd75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.735401] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "ede967c0-ec3a-4f26-8290-0ee36890cd75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.737842] env[63538]: INFO nova.compute.manager [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Terminating instance [ 1092.741579] env[63538]: DEBUG nova.compute.manager [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Start destroying the instance on the hypervisor. 
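(Editor's aside: the lockutils records above report two timings per critical section, how long the caller waited to acquire the named lock and how long it was held. The service itself uses oslo.concurrency; the sketch below only reproduces the waited/held timing pattern with stdlib primitives, as an illustration rather than the real implementation:)

    import threading
    import time
    from contextlib import contextmanager

    _locks: dict[str, threading.Lock] = {}

    @contextmanager
    def timed_lock(name: str):
        """Yield under a named lock, reporting waited/held times like lockutils."""
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        with lock:
            waited = time.monotonic() - t0
            print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
            t1 = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - t1
                print(f'Lock "{name}" released :: held {held:.3f}s')

    # e.g. the per-instance lock taken before terminating an instance:
    with timed_lock("ede967c0-ec3a-4f26-8290-0ee36890cd75"):
        pass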
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1092.741812] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1092.743580] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f36bae-08d7-485a-a9df-8d89b56bcb95 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.754589] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1092.757980] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2791454a-129c-4f20-aaa6-7410d2c91d31 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.760194] env[63538]: INFO nova.compute.manager [-] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Took 1.61 seconds to deallocate network for instance. [ 1092.768719] env[63538]: DEBUG oslo_vmware.api [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1092.768719] env[63538]: value = "task-5101655" [ 1092.768719] env[63538]: _type = "Task" [ 1092.768719] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.774325] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeacbc37-ac2e-4bf8-bfda-871e564a4172 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.783172] env[63538]: DEBUG oslo_vmware.api [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101655, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.788933] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abe160b-5ea5-430b-894d-ead626e59985 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.827481] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93f8a2c-aa24-4fb7-8afc-55b9608d748c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.837711] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8febf066-fb29-43c7-a09d-90b7161c158a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.853147] env[63538]: DEBUG nova.compute.provider_tree [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.981035] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101653, 'name': Rename_Task, 'duration_secs': 0.373206} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.981035] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1092.981035] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7fa7b2e4-62e2-436d-a9f6-a04f55341e97 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.990365] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1092.990365] env[63538]: value = "task-5101656" [ 1092.990365] env[63538]: _type = "Task" [ 1092.990365] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.001214] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101656, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.195519] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101652, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626594} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.195727] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 048573b4-26db-4a62-81e0-1bc1c3999d02/048573b4-26db-4a62-81e0-1bc1c3999d02.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1093.196052] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1093.196174] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-85942059-3d23-4c4d-9b41-ebf5418b9c6d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.203414] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1093.203414] env[63538]: value = "task-5101657" [ 1093.203414] env[63538]: _type = "Task" [ 1093.203414] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.213099] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101657, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.221469] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101654, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.270373] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.281091] env[63538]: DEBUG oslo_vmware.api [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101655, 'name': PowerOffVM_Task, 'duration_secs': 0.279722} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.281397] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1093.281580] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1093.281873] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee977275-64ad-4634-98f0-35528eaf4dc9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.356857] env[63538]: DEBUG nova.scheduler.client.report [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1093.364428] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1093.364669] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1093.364878] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleting the datastore file [datastore1] ede967c0-ec3a-4f26-8290-0ee36890cd75 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1093.365187] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-920ea9f5-db58-4980-9d5f-78c4c06b2d13 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.372527] env[63538]: DEBUG oslo_vmware.api [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for the task: (returnval){ [ 1093.372527] env[63538]: value = "task-5101659" [ 1093.372527] env[63538]: _type = "Task" [ 1093.372527] env[63538]: } to complete. 
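(Editor's aside: the inventory payload repeated in these reports is what bounds scheduling on this node; usable capacity per resource class is, roughly, (total - reserved) * allocation_ratio as Placement applies it. For the values above that works out to 192 VCPU, 196078 MB of RAM and 200 GB of disk; a small check:)

    # Effective capacity: (total - reserved) * allocation_ratio, per resource class.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:g}")
    # VCPU: 192
    # MEMORY_MB: 196078
    # DISK_GB: 200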
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.384625] env[63538]: DEBUG oslo_vmware.api [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101659, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.501170] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101656, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.717916] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101657, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.173352} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.722307] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1093.723371] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bca400a-2e9a-485b-bb5b-39a9df766237 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.732398] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101654, 'name': CreateSnapshot_Task, 'duration_secs': 0.664253} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.741997] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1093.755019] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 048573b4-26db-4a62-81e0-1bc1c3999d02/048573b4-26db-4a62-81e0-1bc1c3999d02.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1093.756151] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa314ee-3c4c-4110-8866-b9a9397ce9ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.759915] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4de450e-d608-4c49-a179-3a8c822f0092 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.790447] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1093.790447] env[63538]: value = "task-5101661" [ 1093.790447] env[63538]: _type = "Task" [ 1093.790447] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.799975] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101661, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.864734] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.340s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.865358] env[63538]: DEBUG nova.compute.manager [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1093.868406] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.475s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.868731] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.871821] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 8.365s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.891316] env[63538]: DEBUG oslo_vmware.api [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Task: {'id': task-5101659, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.474423} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.891694] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1093.892046] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1093.892520] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1093.892838] env[63538]: INFO nova.compute.manager [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1093.893857] env[63538]: DEBUG oslo.service.loopingcall [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1093.894026] env[63538]: DEBUG nova.compute.manager [-] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1093.894261] env[63538]: DEBUG nova.network.neutron [-] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1093.909814] env[63538]: INFO nova.scheduler.client.report [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted allocations for instance fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9 [ 1094.001414] env[63538]: DEBUG oslo_vmware.api [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101656, 'name': PowerOnVM_Task, 'duration_secs': 0.552393} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.001720] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1094.001936] env[63538]: INFO nova.compute.manager [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Took 8.90 seconds to spawn the instance on the hypervisor. 
[ 1094.002140] env[63538]: DEBUG nova.compute.manager [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1094.002975] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9f98d3-59ff-483f-8538-e4d608a5347f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.253682] env[63538]: DEBUG nova.compute.manager [req-2bbc1167-dd2d-44b0-a931-46610ada2523 req-efcf60d2-027b-4477-8970-641a2ed0c905 service nova] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Received event network-vif-deleted-f752fb93-15ab-4803-9e58-012b22d5f121 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1094.253904] env[63538]: INFO nova.compute.manager [req-2bbc1167-dd2d-44b0-a931-46610ada2523 req-efcf60d2-027b-4477-8970-641a2ed0c905 service nova] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Neutron deleted interface f752fb93-15ab-4803-9e58-012b22d5f121; detaching it from the instance and deleting it from the info cache [ 1094.254097] env[63538]: DEBUG nova.network.neutron [req-2bbc1167-dd2d-44b0-a931-46610ada2523 req-efcf60d2-027b-4477-8970-641a2ed0c905 service nova] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.300097] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1094.300097] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e13321e3-59b7-43d6-95e9-bf311544a451 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.312938] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101661, 'name': ReconfigVM_Task, 'duration_secs': 0.341463} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.314454] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 048573b4-26db-4a62-81e0-1bc1c3999d02/048573b4-26db-4a62-81e0-1bc1c3999d02.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1094.315220] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1094.315220] env[63538]: value = "task-5101663" [ 1094.315220] env[63538]: _type = "Task" [ 1094.315220] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.315437] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19efe30f-bfc9-4c25-82c8-68cda64bf603 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.332061] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101663, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.333350] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1094.333350] env[63538]: value = "task-5101664" [ 1094.333350] env[63538]: _type = "Task" [ 1094.333350] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.341637] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101664, 'name': Rename_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.376526] env[63538]: DEBUG nova.compute.utils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1094.385512] env[63538]: DEBUG nova.compute.manager [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1094.385725] env[63538]: DEBUG nova.network.neutron [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1094.418635] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60df62c7-1540-4f60-9a0a-d8ab0ac38e8e tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.008s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.432587] env[63538]: DEBUG nova.policy [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16fdc041f4c74e0ea76ee8984f9786f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a701618902d411b8af203fdbb1069be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1094.529023] env[63538]: INFO nova.compute.manager [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Took 17.49 seconds to build instance. [ 1094.707349] env[63538]: DEBUG nova.network.neutron [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Successfully created port: 09315fe4-910e-4534-9382-1558fa660416 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1094.732293] env[63538]: DEBUG nova.network.neutron [-] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.758856] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13f314bc-d78e-407f-95dc-9d011c20dd6c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.771244] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d3e253-6b93-4170-8ca8-54aa971875a5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.805610] env[63538]: DEBUG nova.compute.manager [req-2bbc1167-dd2d-44b0-a931-46610ada2523 req-efcf60d2-027b-4477-8970-641a2ed0c905 service nova] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Detach interface failed, port_id=f752fb93-15ab-4803-9e58-012b22d5f121, reason: Instance ede967c0-ec3a-4f26-8290-0ee36890cd75 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1094.827420] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101663, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.845360] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101664, 'name': Rename_Task, 'duration_secs': 0.165476} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.845653] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1094.845954] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87c960ab-f2b7-4e70-818b-7baac994c1cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.853325] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1094.853325] env[63538]: value = "task-5101665" [ 1094.853325] env[63538]: _type = "Task" [ 1094.853325] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.863033] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101665, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.887671] env[63538]: INFO nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating resource usage from migration 8a556cae-5667-4483-8ca3-57542b6380a6 [ 1094.890326] env[63538]: DEBUG nova.compute.manager [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1094.910594] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance ede967c0-ec3a-4f26-8290-0ee36890cd75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1094.910794] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance fb26fb32-a420-4667-850c-e32786edd8f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1094.911144] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 3d80dc17-e330-4575-8e12-e06d8e76274a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1094.911144] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 0df15328-aebd-44c5-9c78-ee05f188ad95 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1094.911301] env[63538]: WARNING nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 466be7db-79e4-49fd-aa3b-56fbe5c60457 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1094.911451] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 6257bf5c-8a1c-4204-9605-cc07491e14ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1094.911672] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 42af31f3-a9d0-4fdd-99fa-442ebe915277 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1094.911845] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 048573b4-26db-4a62-81e0-1bc1c3999d02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1094.912021] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 17350ce4-555b-4f00-9a75-de32a4453141 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1094.912187] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Migration 8a556cae-5667-4483-8ca3-57542b6380a6 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1715}} [ 1094.912472] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 049518bd-d569-491a-8f79-6f0b78cf44b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1094.912769] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1094.912994] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=100GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '8', 'num_vm_active': '5', 'num_task_deleting': '1', 'num_os_type_None': '8', 'num_proj_ea05f3fb4676466bb2a286f5a2fefb8f': '1', 'io_workload': '4', 'num_task_None': '3', 'num_proj_df090f9a727d4cf4a0f466e27928bdc6': '2', 'num_task_image_uploading': '1', 'num_proj_1a06b7cc1ab24ba584bbe970e4fc5e81': '1', 'num_proj_9294e4310e484e338932e3514d079594': '1', 'num_task_resize_prep': '1', 'num_proj_492427e54e1048f292dab2abdac71af5': '1', 'num_vm_building': '3', 'num_task_spawning': '2', 'num_proj_7063c42297c24f2baf7271fa25dec927': '1', 'num_proj_2a701618902d411b8af203fdbb1069be': '1'} {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1095.030093] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e0e2316a-e13a-4125-80b8-c97b51b97938 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "42af31f3-a9d0-4fdd-99fa-442ebe915277" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.010s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.102710] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d4c39d-5d1e-491a-be02-2dfd6c77b25e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.112212] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786639fd-3843-499f-a607-7f9fb9aaaff0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.143747] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3944e1-23c1-427f-aedc-3b047115f78c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.152063] env[63538]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6a0a84-f6ea-49cf-bab7-8f30848e249e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.167588] env[63538]: DEBUG nova.compute.provider_tree [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.234981] env[63538]: INFO nova.compute.manager [-] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Took 1.34 seconds to deallocate network for instance. [ 1095.331288] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101663, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.368682] env[63538]: DEBUG oslo_vmware.api [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101665, 'name': PowerOnVM_Task, 'duration_secs': 0.509546} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.368682] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1095.368906] env[63538]: INFO nova.compute.manager [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Took 7.92 seconds to spawn the instance on the hypervisor. 
[ 1095.369275] env[63538]: DEBUG nova.compute.manager [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1095.370680] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44365b8b-6703-4a5e-9233-31086afcf283 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.671438] env[63538]: DEBUG nova.scheduler.client.report [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1095.742326] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.829786] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101663, 'name': CloneVM_Task} progress is 95%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.895169] env[63538]: INFO nova.compute.manager [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Took 16.20 seconds to build instance. [ 1095.902536] env[63538]: DEBUG nova.compute.manager [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1095.937131] env[63538]: DEBUG nova.virt.hardware [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1095.937400] env[63538]: DEBUG nova.virt.hardware [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1095.937722] env[63538]: DEBUG nova.virt.hardware [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1095.938095] env[63538]: DEBUG nova.virt.hardware [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1095.938420] env[63538]: DEBUG nova.virt.hardware [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1095.940194] env[63538]: DEBUG nova.virt.hardware [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1095.940194] env[63538]: DEBUG nova.virt.hardware [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1095.940194] env[63538]: DEBUG nova.virt.hardware [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1095.940194] 
env[63538]: DEBUG nova.virt.hardware [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1095.940194] env[63538]: DEBUG nova.virt.hardware [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1095.940426] env[63538]: DEBUG nova.virt.hardware [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1095.941695] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ce816b-982d-4f53-9b6f-b9eaec6dd991 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.951749] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acecc676-c698-472d-b0ee-01ef0289269a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.177076] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63538) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1096.177339] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.306s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.178046] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.396s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.178274] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.181164] env[63538]: DEBUG oslo_concurrency.lockutils [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.256s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.207998] env[63538]: 
INFO nova.scheduler.client.report [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleted allocations for instance 0df15328-aebd-44c5-9c78-ee05f188ad95 [ 1096.332195] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101663, 'name': CloneVM_Task, 'duration_secs': 1.815451} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.332504] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Created linked-clone VM from snapshot [ 1096.334466] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b675b7ad-7495-4603-b4e9-cf837feed09b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.339036] env[63538]: DEBUG nova.compute.manager [req-fcdd6ef3-1266-44b3-8fab-38c9bc2f90be req-307fb764-2366-49ab-9989-891a878bf7f6 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Received event network-changed-dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1096.339386] env[63538]: DEBUG nova.compute.manager [req-fcdd6ef3-1266-44b3-8fab-38c9bc2f90be req-307fb764-2366-49ab-9989-891a878bf7f6 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Refreshing instance network info cache due to event network-changed-dfaa4640-ae2a-444b-aa92-e24dd9eca692. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1096.339739] env[63538]: DEBUG oslo_concurrency.lockutils [req-fcdd6ef3-1266-44b3-8fab-38c9bc2f90be req-307fb764-2366-49ab-9989-891a878bf7f6 service nova] Acquiring lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.340080] env[63538]: DEBUG oslo_concurrency.lockutils [req-fcdd6ef3-1266-44b3-8fab-38c9bc2f90be req-307fb764-2366-49ab-9989-891a878bf7f6 service nova] Acquired lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.340411] env[63538]: DEBUG nova.network.neutron [req-fcdd6ef3-1266-44b3-8fab-38c9bc2f90be req-307fb764-2366-49ab-9989-891a878bf7f6 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Refreshing network info cache for port dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1096.351023] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Uploading image 46702d35-6127-4638-a292-93cafec9d37b {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1096.387888] env[63538]: DEBUG oslo_vmware.rw_handles [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1096.387888] env[63538]: value = "vm-992511" [ 1096.387888] env[63538]: _type = "VirtualMachine" [ 1096.387888] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1096.388724] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ed7f54f6-4326-4728-a029-c1be49bcfd06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.398451] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b332e2a3-0085-47d9-bbb6-5c335e3015b9 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.709s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.398801] env[63538]: DEBUG oslo_vmware.rw_handles [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lease: (returnval){ [ 1096.398801] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5269c8a1-d6d6-8bf8-a58b-9b0b1af73103" [ 1096.398801] env[63538]: _type = "HttpNfcLease" [ 1096.398801] env[63538]: } obtained for exporting VM: (result){ [ 1096.398801] env[63538]: value = "vm-992511" [ 1096.398801] env[63538]: _type = "VirtualMachine" [ 1096.398801] env[63538]: }. 
{{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1096.399218] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the lease: (returnval){ [ 1096.399218] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5269c8a1-d6d6-8bf8-a58b-9b0b1af73103" [ 1096.399218] env[63538]: _type = "HttpNfcLease" [ 1096.399218] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1096.407280] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1096.407280] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5269c8a1-d6d6-8bf8-a58b-9b0b1af73103" [ 1096.407280] env[63538]: _type = "HttpNfcLease" [ 1096.407280] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1096.466652] env[63538]: DEBUG nova.network.neutron [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Successfully updated port: 09315fe4-910e-4534-9382-1558fa660416 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1096.689349] env[63538]: INFO nova.compute.claims [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1096.721652] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d2b11c10-7e6d-4228-bf3c-14c73fb9e55b tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "0df15328-aebd-44c5-9c78-ee05f188ad95" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.199s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.908713] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1096.908713] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5269c8a1-d6d6-8bf8-a58b-9b0b1af73103" [ 1096.908713] env[63538]: _type = "HttpNfcLease" [ 1096.908713] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1096.909020] env[63538]: DEBUG oslo_vmware.rw_handles [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1096.909020] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5269c8a1-d6d6-8bf8-a58b-9b0b1af73103" [ 1096.909020] env[63538]: _type = "HttpNfcLease" [ 1096.909020] env[63538]: }. 
{{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1096.909767] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786d5a48-548d-43ff-8d1e-b19d559cd715 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.923032] env[63538]: DEBUG oslo_vmware.rw_handles [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d4b92d-342c-5049-61e7-d4d3d5e1acbf/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1096.923032] env[63538]: DEBUG oslo_vmware.rw_handles [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d4b92d-342c-5049-61e7-d4d3d5e1acbf/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1096.988327] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "refresh_cache-17350ce4-555b-4f00-9a75-de32a4453141" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.988589] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "refresh_cache-17350ce4-555b-4f00-9a75-de32a4453141" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.988652] env[63538]: DEBUG nova.network.neutron [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1097.000027] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.000438] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.071459] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-272ffcdb-c51d-4864-be63-3ceb9e3b215e 
{{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.196038] env[63538]: INFO nova.compute.resource_tracker [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating resource usage from migration 8a556cae-5667-4483-8ca3-57542b6380a6 [ 1097.217724] env[63538]: DEBUG nova.network.neutron [req-fcdd6ef3-1266-44b3-8fab-38c9bc2f90be req-307fb764-2366-49ab-9989-891a878bf7f6 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updated VIF entry in instance network info cache for port dfaa4640-ae2a-444b-aa92-e24dd9eca692. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1097.218103] env[63538]: DEBUG nova.network.neutron [req-fcdd6ef3-1266-44b3-8fab-38c9bc2f90be req-307fb764-2366-49ab-9989-891a878bf7f6 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updating instance_info_cache with network_info: [{"id": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "address": "fa:16:3e:f0:24:40", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfaa4640-ae", "ovs_interfaceid": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.387010] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1ee6e7-69b2-48f0-b375-a170e214a35b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.397079] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d2d43a-96fc-4260-94ce-82b0f559072e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.429766] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33cc04bc-60eb-44a2-afe9-1a1e3b32ecc8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.440095] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fd732d-ee5c-473a-9f3e-548319839e08 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.456385] env[63538]: DEBUG 
nova.compute.provider_tree [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1097.505156] env[63538]: DEBUG nova.compute.manager [req-0e22fbd9-2222-406f-8bb5-d5b85de45826 req-ccffdf72-b3f3-4769-ac96-a3af8c8a4e5e service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Received event network-changed-3de39e87-f579-458e-a713-326821c5daa5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1097.505859] env[63538]: DEBUG nova.compute.manager [req-0e22fbd9-2222-406f-8bb5-d5b85de45826 req-ccffdf72-b3f3-4769-ac96-a3af8c8a4e5e service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Refreshing instance network info cache due to event network-changed-3de39e87-f579-458e-a713-326821c5daa5. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1097.506077] env[63538]: DEBUG oslo_concurrency.lockutils [req-0e22fbd9-2222-406f-8bb5-d5b85de45826 req-ccffdf72-b3f3-4769-ac96-a3af8c8a4e5e service nova] Acquiring lock "refresh_cache-048573b4-26db-4a62-81e0-1bc1c3999d02" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.506209] env[63538]: DEBUG oslo_concurrency.lockutils [req-0e22fbd9-2222-406f-8bb5-d5b85de45826 req-ccffdf72-b3f3-4769-ac96-a3af8c8a4e5e service nova] Acquired lock "refresh_cache-048573b4-26db-4a62-81e0-1bc1c3999d02" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.507107] env[63538]: DEBUG nova.network.neutron [req-0e22fbd9-2222-406f-8bb5-d5b85de45826 req-ccffdf72-b3f3-4769-ac96-a3af8c8a4e5e service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Refreshing network info cache for port 3de39e87-f579-458e-a713-326821c5daa5 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1097.509148] env[63538]: DEBUG nova.compute.manager [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1097.581260] env[63538]: DEBUG nova.network.neutron [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1097.721173] env[63538]: DEBUG oslo_concurrency.lockutils [req-fcdd6ef3-1266-44b3-8fab-38c9bc2f90be req-307fb764-2366-49ab-9989-891a878bf7f6 service nova] Releasing lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1097.774540] env[63538]: DEBUG nova.network.neutron [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Updating instance_info_cache with network_info: [{"id": "09315fe4-910e-4534-9382-1558fa660416", "address": "fa:16:3e:c8:34:f8", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09315fe4-91", "ovs_interfaceid": "09315fe4-910e-4534-9382-1558fa660416", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.960726] env[63538]: DEBUG nova.scheduler.client.report [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1098.042469] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.277672] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "refresh_cache-17350ce4-555b-4f00-9a75-de32a4453141" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1098.278466] env[63538]: DEBUG nova.compute.manager [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Instance network_info: |[{"id": "09315fe4-910e-4534-9382-1558fa660416", "address": "fa:16:3e:c8:34:f8", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09315fe4-91", "ovs_interfaceid": "09315fe4-910e-4534-9382-1558fa660416", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1098.280586] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:34:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09315fe4-910e-4534-9382-1558fa660416', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1098.288766] env[63538]: DEBUG oslo.service.loopingcall [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1098.293018] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1098.293018] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7337c52c-8615-4611-9d02-73d37784bf33 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.314200] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1098.314200] env[63538]: value = "task-5101669" [ 1098.314200] env[63538]: _type = "Task" [ 1098.314200] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.324317] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101669, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.380519] env[63538]: DEBUG nova.compute.manager [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Received event network-vif-plugged-09315fe4-910e-4534-9382-1558fa660416 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1098.381382] env[63538]: DEBUG oslo_concurrency.lockutils [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] Acquiring lock "17350ce4-555b-4f00-9a75-de32a4453141-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.381799] env[63538]: DEBUG oslo_concurrency.lockutils [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] Lock "17350ce4-555b-4f00-9a75-de32a4453141-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.382126] env[63538]: DEBUG oslo_concurrency.lockutils [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] Lock "17350ce4-555b-4f00-9a75-de32a4453141-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.382443] env[63538]: DEBUG nova.compute.manager [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] No waiting events found dispatching network-vif-plugged-09315fe4-910e-4534-9382-1558fa660416 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1098.382782] env[63538]: WARNING nova.compute.manager [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Received unexpected event network-vif-plugged-09315fe4-910e-4534-9382-1558fa660416 for instance with vm_state building and task_state spawning. [ 1098.383149] env[63538]: DEBUG nova.compute.manager [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Received event network-changed-09315fe4-910e-4534-9382-1558fa660416 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1098.383529] env[63538]: DEBUG nova.compute.manager [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Refreshing instance network info cache due to event network-changed-09315fe4-910e-4534-9382-1558fa660416. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1098.385574] env[63538]: DEBUG oslo_concurrency.lockutils [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] Acquiring lock "refresh_cache-17350ce4-555b-4f00-9a75-de32a4453141" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.385574] env[63538]: DEBUG oslo_concurrency.lockutils [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] Acquired lock "refresh_cache-17350ce4-555b-4f00-9a75-de32a4453141" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.385574] env[63538]: DEBUG nova.network.neutron [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Refreshing network info cache for port 09315fe4-910e-4534-9382-1558fa660416 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1098.467187] env[63538]: DEBUG oslo_concurrency.lockutils [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.286s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.467763] env[63538]: INFO nova.compute.manager [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Migrating [ 1098.475024] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.205s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.475577] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.478457] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.736s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.478457] env[63538]: DEBUG nova.objects.instance [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lazy-loading 'resources' on Instance uuid ede967c0-ec3a-4f26-8290-0ee36890cd75 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1098.480765] env[63538]: DEBUG nova.network.neutron [req-0e22fbd9-2222-406f-8bb5-d5b85de45826 req-ccffdf72-b3f3-4769-ac96-a3af8c8a4e5e service nova] [instance: 
048573b4-26db-4a62-81e0-1bc1c3999d02] Updated VIF entry in instance network info cache for port 3de39e87-f579-458e-a713-326821c5daa5. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1098.480874] env[63538]: DEBUG nova.network.neutron [req-0e22fbd9-2222-406f-8bb5-d5b85de45826 req-ccffdf72-b3f3-4769-ac96-a3af8c8a4e5e service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Updating instance_info_cache with network_info: [{"id": "3de39e87-f579-458e-a713-326821c5daa5", "address": "fa:16:3e:f4:54:5a", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3de39e87-f5", "ovs_interfaceid": "3de39e87-f579-458e-a713-326821c5daa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.511776] env[63538]: INFO nova.scheduler.client.report [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleted allocations for instance 466be7db-79e4-49fd-aa3b-56fbe5c60457 [ 1098.826806] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101669, 'name': CreateVM_Task, 'duration_secs': 0.507284} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.827168] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1098.827940] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.828364] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.829039] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1098.829161] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ed5387a-9442-4eda-b074-d82831fbc31c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.835642] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1098.835642] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ac402c-ee97-e465-6190-eac452a7c414" [ 1098.835642] env[63538]: _type = "Task" [ 1098.835642] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.845349] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ac402c-ee97-e465-6190-eac452a7c414, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.995383] env[63538]: DEBUG oslo_concurrency.lockutils [req-0e22fbd9-2222-406f-8bb5-d5b85de45826 req-ccffdf72-b3f3-4769-ac96-a3af8c8a4e5e service nova] Releasing lock "refresh_cache-048573b4-26db-4a62-81e0-1bc1c3999d02" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.998519] env[63538]: DEBUG oslo_concurrency.lockutils [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.998772] env[63538]: DEBUG oslo_concurrency.lockutils [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.998979] env[63538]: DEBUG nova.network.neutron [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1099.024833] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9b144d21-edac-4769-9065-522a965c58ea tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "466be7db-79e4-49fd-aa3b-56fbe5c60457" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.044s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.206101] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d870e4a3-8c0c-4e2f-aa33-1dc50c772dd8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.215588] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261b6c5a-1ef4-44b3-8e6f-d64bb734bcea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.248644] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3769cc51-e2c3-4371-a888-2db89eb9d3d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.258555] env[63538]: DEBUG nova.network.neutron [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Updated VIF entry in instance network info cache for port 09315fe4-910e-4534-9382-1558fa660416. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1099.258987] env[63538]: DEBUG nova.network.neutron [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Updating instance_info_cache with network_info: [{"id": "09315fe4-910e-4534-9382-1558fa660416", "address": "fa:16:3e:c8:34:f8", "network": {"id": "8facc2ff-7a17-4beb-ad4f-7cf9d95cc93c", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-137157780-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a701618902d411b8af203fdbb1069be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09315fe4-91", "ovs_interfaceid": "09315fe4-910e-4534-9382-1558fa660416", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.265264] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eaaa78f-fbed-4843-842b-3b70e1beef05 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.287544] env[63538]: DEBUG nova.compute.provider_tree [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1099.347316] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ac402c-ee97-e465-6190-eac452a7c414, 'name': SearchDatastore_Task, 'duration_secs': 0.018575} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.347688] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.349181] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1099.349181] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.349181] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.349181] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.349181] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73f16fad-45ef-4e7e-9235-3993780149bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.359721] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.359971] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1099.361542] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-502b0cfb-30b9-46e1-bddc-cba3bb0816a6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.369063] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1099.369063] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5227ecec-d207-cb49-6e79-80ae5a40f9b7" [ 1099.369063] env[63538]: _type = "Task" [ 1099.369063] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.378060] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5227ecec-d207-cb49-6e79-80ae5a40f9b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.579574] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.579574] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.776282] env[63538]: DEBUG nova.network.neutron [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance_info_cache with network_info: [{"id": "da39877d-c305-4a70-8310-b2ad992f0cc7", "address": "fa:16:3e:e7:40:0a", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapda39877d-c3", "ovs_interfaceid": "da39877d-c305-4a70-8310-b2ad992f0cc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.776282] env[63538]: DEBUG oslo_concurrency.lockutils [req-092f9aad-85f5-4215-9b1a-bbe9722b906c req-49c4fc1a-292b-44d5-874f-3c15b057e4a3 service nova] Releasing lock "refresh_cache-17350ce4-555b-4f00-9a75-de32a4453141" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.792038] env[63538]: DEBUG nova.scheduler.client.report [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1099.885702] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5227ecec-d207-cb49-6e79-80ae5a40f9b7, 'name': SearchDatastore_Task, 'duration_secs': 0.014324} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.886808] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97d04463-1e90-4419-96df-27cf727c5976 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.894127] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1099.894127] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5262ca62-e6c0-f376-8b39-e70796512000" [ 1099.894127] env[63538]: _type = "Task" [ 1099.894127] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.904353] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5262ca62-e6c0-f376-8b39-e70796512000, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.054774] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.055033] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.081130] env[63538]: DEBUG nova.compute.manager [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1100.278324] env[63538]: DEBUG oslo_concurrency.lockutils [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.297503] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.819s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.300678] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.258s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.304509] env[63538]: INFO nova.compute.claims [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1100.327017] env[63538]: INFO nova.scheduler.client.report [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Deleted allocations for instance ede967c0-ec3a-4f26-8290-0ee36890cd75 [ 1100.406749] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5262ca62-e6c0-f376-8b39-e70796512000, 'name': SearchDatastore_Task, 
'duration_secs': 0.020209} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.407091] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.407474] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 17350ce4-555b-4f00-9a75-de32a4453141/17350ce4-555b-4f00-9a75-de32a4453141.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1100.407812] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa8caa5c-8e16-4ded-b28e-d372a530c609 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.416348] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1100.416348] env[63538]: value = "task-5101670" [ 1100.416348] env[63538]: _type = "Task" [ 1100.416348] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.426640] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101670, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.559626] env[63538]: DEBUG nova.compute.manager [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1100.570451] env[63538]: DEBUG oslo_concurrency.lockutils [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Acquiring lock "aaf52cad-86fd-42df-8ee3-13724e3f5e94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.570840] env[63538]: DEBUG oslo_concurrency.lockutils [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Lock "aaf52cad-86fd-42df-8ee3-13724e3f5e94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.614996] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.839028] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8eb2bf32-9054-49f4-909c-b95bbb8e1ae4 tempest-ServersTestJSON-148199479 tempest-ServersTestJSON-148199479-project-member] Lock "ede967c0-ec3a-4f26-8290-0ee36890cd75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.101s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.927577] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101670, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.073441] env[63538]: DEBUG nova.compute.manager [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1101.096502] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1101.432961] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101670, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.827094} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.433170] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 17350ce4-555b-4f00-9a75-de32a4453141/17350ce4-555b-4f00-9a75-de32a4453141.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1101.433380] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1101.433727] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64ed2380-9e7d-4239-acf1-b43889ac7496 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.445341] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1101.445341] env[63538]: value = "task-5101671" [ 1101.445341] env[63538]: _type = "Task" [ 1101.445341] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.456336] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101671, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.517713] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7277797-3143-4631-9f28-8109e3a1f410 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.525569] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5729f45-58b7-4db5-b555-5cae8a2072bf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.557754] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98055085-a4aa-411c-a45e-622161c11c4a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.566901] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3751bf-38bc-44b0-8ccd-7251b64b1fb9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.585114] env[63538]: DEBUG nova.compute.provider_tree [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.600023] env[63538]: DEBUG oslo_concurrency.lockutils [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1101.793760] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da91816-f8ba-48fb-9eaf-589934180174 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.813480] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance '049518bd-d569-491a-8f79-6f0b78cf44b2' progress to 0 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1101.956648] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101671, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079991} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.957047] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1101.957840] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a29f7db-cfc5-46f9-be4e-a5615ba53dbe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.980897] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 17350ce4-555b-4f00-9a75-de32a4453141/17350ce4-555b-4f00-9a75-de32a4453141.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1101.981313] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af34d784-fea7-4880-9270-0a18f2287517 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.003812] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1102.003812] env[63538]: value = "task-5101672" [ 1102.003812] env[63538]: _type = "Task" [ 1102.003812] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.015950] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101672, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.090461] env[63538]: DEBUG nova.scheduler.client.report [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1102.320741] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1102.321027] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-511181bf-1617-40d9-b655-e3ad40e8cc70 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.329390] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1102.329390] env[63538]: value = "task-5101673" [ 1102.329390] env[63538]: _type = "Task" [ 1102.329390] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.340313] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101673, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.517332] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101672, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.596895] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.296s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.597600] env[63538]: DEBUG nova.compute.manager [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1102.600498] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.986s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.601971] env[63538]: INFO nova.compute.claims [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1102.846208] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101673, 'name': PowerOffVM_Task, 'duration_secs': 0.220911} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.846626] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1102.846872] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance '049518bd-d569-491a-8f79-6f0b78cf44b2' progress to 17 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1103.016390] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101672, 'name': ReconfigVM_Task, 'duration_secs': 0.577283} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.019758] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 17350ce4-555b-4f00-9a75-de32a4453141/17350ce4-555b-4f00-9a75-de32a4453141.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1103.019758] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b0c478b-52a1-4e0f-af6d-855259be3a79 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.026498] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1103.026498] env[63538]: value = "task-5101674" [ 1103.026498] env[63538]: _type = "Task" [ 1103.026498] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.036125] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101674, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.109510] env[63538]: DEBUG nova.compute.utils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1103.111072] env[63538]: DEBUG nova.compute.manager [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1103.111406] env[63538]: DEBUG nova.network.neutron [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1103.163299] env[63538]: DEBUG nova.policy [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ebe77c0e32ce4a32b290ba5088e107f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7063c42297c24f2baf7271fa25dec927', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1103.354813] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1103.356390] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1103.356390] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1103.356390] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1103.356390] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1103.356390] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1103.356390] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1103.356390] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1103.356701] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1103.356701] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1103.356940] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1103.363581] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bc19317-760c-4751-90ea-0734fcb35bb2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.387640] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1103.387640] env[63538]: value = "task-5101675" [ 1103.387640] env[63538]: _type = "Task" [ 1103.387640] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.402180] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101675, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.493234] env[63538]: DEBUG nova.network.neutron [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Successfully created port: f31eeedc-851d-457f-8464-c8562fdeaf87 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1103.537101] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101674, 'name': Rename_Task, 'duration_secs': 0.210196} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.537518] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1103.537835] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8247f81a-1a2d-46eb-af07-16dc873a25f3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.546325] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1103.546325] env[63538]: value = "task-5101676" [ 1103.546325] env[63538]: _type = "Task" [ 1103.546325] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.557252] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101676, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.616073] env[63538]: DEBUG nova.compute.manager [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1103.801018] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331f3e68-c7f6-4c62-bb61-e136efa29637 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.808445] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7fc51e-ae81-48c0-a06b-34d90aa10687 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.849886] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aafc320f-1f0d-486b-9742-62a85c9365ee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.858538] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a989fe-0b4a-4f4e-be84-4cafafd71822 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.873672] env[63538]: DEBUG nova.compute.provider_tree [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1103.898262] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101675, 'name': ReconfigVM_Task, 'duration_secs': 0.245486} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.898592] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance '049518bd-d569-491a-8f79-6f0b78cf44b2' progress to 33 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1104.061415] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101676, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.377364] env[63538]: DEBUG nova.scheduler.client.report [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1104.406809] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1104.406809] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1104.406809] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1104.406809] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1104.406809] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1104.406993] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1104.407234] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1104.407484] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1104.407722] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1104.407930] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1104.408176] env[63538]: DEBUG nova.virt.hardware [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1104.413784] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Reconfiguring VM instance instance-00000065 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1104.414529] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6f7e45c-28f8-4be2-86e7-1554b1d59ea0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.434987] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1104.434987] env[63538]: value = "task-5101677" [ 1104.434987] env[63538]: _type = "Task" [ 1104.434987] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.444563] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101677, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.559157] env[63538]: DEBUG oslo_vmware.api [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101676, 'name': PowerOnVM_Task, 'duration_secs': 0.515538} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.559472] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1104.559713] env[63538]: INFO nova.compute.manager [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Took 8.66 seconds to spawn the instance on the hypervisor. [ 1104.559923] env[63538]: DEBUG nova.compute.manager [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1104.560758] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb168e2-c2f5-4006-ab0d-ae016052aed5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.629519] env[63538]: DEBUG nova.compute.manager [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1104.661444] env[63538]: DEBUG nova.virt.hardware [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1104.661788] env[63538]: DEBUG nova.virt.hardware [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1104.661961] env[63538]: DEBUG nova.virt.hardware [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1104.662236] env[63538]: DEBUG nova.virt.hardware [None 
req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1104.662421] env[63538]: DEBUG nova.virt.hardware [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1104.662592] env[63538]: DEBUG nova.virt.hardware [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1104.662859] env[63538]: DEBUG nova.virt.hardware [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1104.663071] env[63538]: DEBUG nova.virt.hardware [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1104.663271] env[63538]: DEBUG nova.virt.hardware [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1104.663489] env[63538]: DEBUG nova.virt.hardware [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1104.663697] env[63538]: DEBUG nova.virt.hardware [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1104.664720] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebcb2e1-789b-4137-beec-ea962aa2e4d3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.674759] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5cb75c-ac54-4f7c-8e84-828c468e189d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.819332] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquiring lock "12beddad-1f19-4cee-b885-3079e3603ba3" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.819588] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Lock "12beddad-1f19-4cee-b885-3079e3603ba3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.882938] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.282s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.884026] env[63538]: DEBUG nova.compute.manager [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1104.886873] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.791s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.888532] env[63538]: INFO nova.compute.claims [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1104.945843] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101677, 'name': ReconfigVM_Task, 'duration_secs': 0.310058} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.945843] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Reconfigured VM instance instance-00000065 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1104.947047] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc54b3a-de55-446a-8766-9350744ab774 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.969786] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 049518bd-d569-491a-8f79-6f0b78cf44b2/049518bd-d569-491a-8f79-6f0b78cf44b2.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1104.970621] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43db0d7c-3321-438f-9034-1a7df9861f1b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.989555] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1104.989555] env[63538]: value = "task-5101678" [ 1104.989555] env[63538]: _type = "Task" [ 1104.989555] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.998887] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101678, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.033158] env[63538]: DEBUG nova.compute.manager [req-496e5e06-b6e9-4c05-9124-b56dbbc5e2c1 req-a4169622-2cf1-4581-8555-46098510561c service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Received event network-vif-plugged-f31eeedc-851d-457f-8464-c8562fdeaf87 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1105.033393] env[63538]: DEBUG oslo_concurrency.lockutils [req-496e5e06-b6e9-4c05-9124-b56dbbc5e2c1 req-a4169622-2cf1-4581-8555-46098510561c service nova] Acquiring lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.033637] env[63538]: DEBUG oslo_concurrency.lockutils [req-496e5e06-b6e9-4c05-9124-b56dbbc5e2c1 req-a4169622-2cf1-4581-8555-46098510561c service nova] Lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.033942] env[63538]: DEBUG oslo_concurrency.lockutils [req-496e5e06-b6e9-4c05-9124-b56dbbc5e2c1 req-a4169622-2cf1-4581-8555-46098510561c service nova] Lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.034201] env[63538]: DEBUG nova.compute.manager [req-496e5e06-b6e9-4c05-9124-b56dbbc5e2c1 req-a4169622-2cf1-4581-8555-46098510561c service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] No waiting events found dispatching network-vif-plugged-f31eeedc-851d-457f-8464-c8562fdeaf87 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1105.034420] env[63538]: WARNING nova.compute.manager [req-496e5e06-b6e9-4c05-9124-b56dbbc5e2c1 req-a4169622-2cf1-4581-8555-46098510561c service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Received unexpected event network-vif-plugged-f31eeedc-851d-457f-8464-c8562fdeaf87 for instance with vm_state building and task_state spawning. [ 1105.079562] env[63538]: INFO nova.compute.manager [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Took 21.07 seconds to build instance. [ 1105.170409] env[63538]: DEBUG nova.network.neutron [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Successfully updated port: f31eeedc-851d-457f-8464-c8562fdeaf87 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1105.322228] env[63538]: DEBUG nova.compute.manager [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1105.396506] env[63538]: DEBUG nova.compute.utils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1105.398049] env[63538]: DEBUG nova.compute.manager [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1105.398238] env[63538]: DEBUG nova.network.neutron [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1105.459669] env[63538]: DEBUG nova.policy [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '29408f9a52d44cbc8eaa4e3b425b475a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1fe11c1386b14d139f4416cbf20fb201', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1105.501924] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101678, 'name': ReconfigVM_Task, 'duration_secs': 0.295904} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.502229] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 049518bd-d569-491a-8f79-6f0b78cf44b2/049518bd-d569-491a-8f79-6f0b78cf44b2.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1105.502557] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance '049518bd-d569-491a-8f79-6f0b78cf44b2' progress to 50 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1105.582073] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2ad30aee-cf7e-4d45-84d8-be7a68ffd12c tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "17350ce4-555b-4f00-9a75-de32a4453141" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.577s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.673734] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.674256] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.674696] env[63538]: DEBUG nova.network.neutron [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1105.790886] env[63538]: DEBUG nova.network.neutron [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Successfully created port: 319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1105.849851] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.903498] env[63538]: DEBUG nova.compute.manager [None 
req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1105.957405] env[63538]: DEBUG oslo_vmware.rw_handles [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d4b92d-342c-5049-61e7-d4d3d5e1acbf/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1105.958477] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9205d540-1b31-408b-a079-42bd9df3ff0b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.969888] env[63538]: DEBUG oslo_vmware.rw_handles [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d4b92d-342c-5049-61e7-d4d3d5e1acbf/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1105.970226] env[63538]: ERROR oslo_vmware.rw_handles [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d4b92d-342c-5049-61e7-d4d3d5e1acbf/disk-0.vmdk due to incomplete transfer. [ 1105.970616] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-65f2e07c-e461-4ca1-a780-5b34136ece12 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.983264] env[63538]: DEBUG oslo_vmware.rw_handles [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d4b92d-342c-5049-61e7-d4d3d5e1acbf/disk-0.vmdk. 
{{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1105.983628] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Uploaded image 46702d35-6127-4638-a292-93cafec9d37b to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1105.985978] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1105.986396] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-74746149-4e20-4787-8724-54497175baa7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.993118] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1105.993118] env[63538]: value = "task-5101679" [ 1105.993118] env[63538]: _type = "Task" [ 1105.993118] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.009023] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101679, 'name': Destroy_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.010607] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac82740-9509-46ef-b1ee-4acf2fdb9dfd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.042180] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6a66e8-1613-4567-a4f5-b63e8248267f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.066710] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance '049518bd-d569-491a-8f79-6f0b78cf44b2' progress to 67 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1106.150401] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f5cf58-eccb-454d-b535-e18259cc3892 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.159768] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8ba85b-5966-4f8b-9bad-b14fb34f0b9c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.194513] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf186b4-1839-4100-ab1c-867aa515c9d0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.206415] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d075e919-8ccc-401a-9fcd-62eb1cd63340 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.222302] env[63538]: DEBUG nova.compute.provider_tree [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1106.234747] env[63538]: DEBUG nova.network.neutron [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1106.504074] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101679, 'name': Destroy_Task, 'duration_secs': 0.393864} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.504364] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Destroyed the VM [ 1106.504618] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1106.504886] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3215d147-2bbc-49f7-b468-67dffacfc6f1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.512370] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1106.512370] env[63538]: value = "task-5101680" [ 1106.512370] env[63538]: _type = "Task" [ 1106.512370] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.521911] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101680, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.635013] env[63538]: DEBUG nova.network.neutron [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Port da39877d-c305-4a70-8310-b2ad992f0cc7 binding to destination host cpu-1 is already ACTIVE {{(pid=63538) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1106.689568] env[63538]: DEBUG nova.network.neutron [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updating instance_info_cache with network_info: [{"id": "f31eeedc-851d-457f-8464-c8562fdeaf87", "address": "fa:16:3e:8d:d6:64", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapf31eeedc-85", "ovs_interfaceid": "f31eeedc-851d-457f-8464-c8562fdeaf87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.705460] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "17350ce4-555b-4f00-9a75-de32a4453141" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.705772] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "17350ce4-555b-4f00-9a75-de32a4453141" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.705991] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "17350ce4-555b-4f00-9a75-de32a4453141-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.706204] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "17350ce4-555b-4f00-9a75-de32a4453141-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.706414] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "17350ce4-555b-4f00-9a75-de32a4453141-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.709277] env[63538]: INFO nova.compute.manager [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Terminating instance [ 1106.711509] env[63538]: DEBUG nova.compute.manager [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1106.711724] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1106.712618] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee13ce93-5fc2-42cc-a669-418675d58d46 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.721071] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1106.721343] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38c88328-ff83-4d2d-a0ef-76223b206b90 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.725760] env[63538]: DEBUG nova.scheduler.client.report [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1106.732227] env[63538]: DEBUG oslo_vmware.api [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1106.732227] env[63538]: value = "task-5101681" [ 1106.732227] env[63538]: _type = "Task" [ 1106.732227] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.743481] env[63538]: DEBUG oslo_vmware.api [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101681, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.916709] env[63538]: DEBUG nova.compute.manager [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1106.946039] env[63538]: DEBUG nova.virt.hardware [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1106.946263] env[63538]: DEBUG nova.virt.hardware [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1106.946453] env[63538]: DEBUG nova.virt.hardware [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1106.946794] env[63538]: DEBUG nova.virt.hardware [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1106.946845] env[63538]: DEBUG nova.virt.hardware [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1106.946978] env[63538]: DEBUG nova.virt.hardware [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1106.947256] env[63538]: DEBUG nova.virt.hardware [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1106.947492] env[63538]: DEBUG nova.virt.hardware [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1106.947639] env[63538]: DEBUG nova.virt.hardware [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1106.947818] env[63538]: DEBUG nova.virt.hardware [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1106.947997] env[63538]: DEBUG nova.virt.hardware [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1106.948979] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386bca8f-a706-4fee-a872-c35a529a6ff0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.958401] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adb9e62-4fe3-4105-8e24-2f0792eac41e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.022771] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101680, 'name': RemoveSnapshot_Task} progress is 65%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.065128] env[63538]: DEBUG nova.compute.manager [req-b9c7d92d-81e4-49b3-80b1-f9d6c363a241 req-4ee93385-8708-44d0-b2bd-2313e79665f2 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Received event network-changed-f31eeedc-851d-457f-8464-c8562fdeaf87 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1107.065460] env[63538]: DEBUG nova.compute.manager [req-b9c7d92d-81e4-49b3-80b1-f9d6c363a241 req-4ee93385-8708-44d0-b2bd-2313e79665f2 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Refreshing instance network info cache due to event network-changed-f31eeedc-851d-457f-8464-c8562fdeaf87. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1107.065786] env[63538]: DEBUG oslo_concurrency.lockutils [req-b9c7d92d-81e4-49b3-80b1-f9d6c363a241 req-4ee93385-8708-44d0-b2bd-2313e79665f2 service nova] Acquiring lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.193052] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.193052] env[63538]: DEBUG nova.compute.manager [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Instance network_info: |[{"id": "f31eeedc-851d-457f-8464-c8562fdeaf87", "address": "fa:16:3e:8d:d6:64", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf31eeedc-85", "ovs_interfaceid": "f31eeedc-851d-457f-8464-c8562fdeaf87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1107.193633] env[63538]: DEBUG oslo_concurrency.lockutils [req-b9c7d92d-81e4-49b3-80b1-f9d6c363a241 req-4ee93385-8708-44d0-b2bd-2313e79665f2 service nova] Acquired lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.193633] env[63538]: DEBUG nova.network.neutron [req-b9c7d92d-81e4-49b3-80b1-f9d6c363a241 req-4ee93385-8708-44d0-b2bd-2313e79665f2 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Refreshing network info cache for port f31eeedc-851d-457f-8464-c8562fdeaf87 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1107.195128] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:d6:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '412cde91-d0f0-4193-b36b-d8b9d17384c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'f31eeedc-851d-457f-8464-c8562fdeaf87', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1107.205247] env[63538]: DEBUG oslo.service.loopingcall [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1107.208907] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1107.209407] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2d81772-29dc-4e5e-8912-a58ffaaacce9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.231148] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1107.231148] env[63538]: value = "task-5101682" [ 1107.231148] env[63538]: _type = "Task" [ 1107.231148] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.232180] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.345s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.232661] env[63538]: DEBUG nova.compute.manager [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1107.238561] env[63538]: DEBUG oslo_concurrency.lockutils [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.638s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.243969] env[63538]: INFO nova.compute.claims [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1107.255955] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101682, 'name': CreateVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.260427] env[63538]: DEBUG oslo_vmware.api [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101681, 'name': PowerOffVM_Task, 'duration_secs': 0.270751} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.260427] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1107.260692] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1107.260935] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-648f3abf-b2c0-4710-aaa3-3b8de7e1e10e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.340266] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1107.340618] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1107.340837] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleting the datastore file [datastore1] 17350ce4-555b-4f00-9a75-de32a4453141 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1107.341205] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da8baf3c-2089-4ea6-97bf-3863d58da76c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.350684] env[63538]: DEBUG oslo_vmware.api [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for the task: (returnval){ [ 1107.350684] env[63538]: value = "task-5101684" [ 1107.350684] env[63538]: _type = "Task" [ 1107.350684] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.362564] env[63538]: DEBUG oslo_vmware.api [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101684, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.515971] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1107.515971] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Cleaning up deleted instances {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1107.538126] env[63538]: DEBUG oslo_vmware.api [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101680, 'name': RemoveSnapshot_Task, 'duration_secs': 0.63534} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.538621] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1107.539043] env[63538]: INFO nova.compute.manager [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Took 15.39 seconds to snapshot the instance on the hypervisor. [ 1107.607443] env[63538]: DEBUG nova.network.neutron [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Successfully updated port: 319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1107.630847] env[63538]: DEBUG nova.network.neutron [req-b9c7d92d-81e4-49b3-80b1-f9d6c363a241 req-4ee93385-8708-44d0-b2bd-2313e79665f2 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updated VIF entry in instance network info cache for port f31eeedc-851d-457f-8464-c8562fdeaf87. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1107.631243] env[63538]: DEBUG nova.network.neutron [req-b9c7d92d-81e4-49b3-80b1-f9d6c363a241 req-4ee93385-8708-44d0-b2bd-2313e79665f2 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updating instance_info_cache with network_info: [{"id": "f31eeedc-851d-457f-8464-c8562fdeaf87", "address": "fa:16:3e:8d:d6:64", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf31eeedc-85", "ovs_interfaceid": "f31eeedc-851d-457f-8464-c8562fdeaf87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.659686] env[63538]: DEBUG oslo_concurrency.lockutils [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "049518bd-d569-491a-8f79-6f0b78cf44b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.659941] env[63538]: DEBUG oslo_concurrency.lockutils [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "049518bd-d569-491a-8f79-6f0b78cf44b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.660125] env[63538]: DEBUG oslo_concurrency.lockutils [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "049518bd-d569-491a-8f79-6f0b78cf44b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.743152] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101682, 'name': CreateVM_Task, 'duration_secs': 0.368153} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.743152] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1107.744692] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.744692] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.744692] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1107.744692] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9ccc816-39ae-4167-993d-d79a1fcefa1a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.751097] env[63538]: DEBUG nova.compute.utils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1107.752554] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1107.752554] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5266f3cd-62d2-ccf8-0f24-09c917b30628" [ 1107.752554] env[63538]: _type = "Task" [ 1107.752554] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.754801] env[63538]: DEBUG nova.compute.manager [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1107.754988] env[63538]: DEBUG nova.network.neutron [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1107.767755] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5266f3cd-62d2-ccf8-0f24-09c917b30628, 'name': SearchDatastore_Task, 'duration_secs': 0.011617} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.768071] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.770021] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1107.770021] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.770021] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.770021] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1107.770021] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1660ee12-e0d5-42f6-92d5-5729dad697f1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.778663] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1107.778846] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1107.779605] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c802a223-dbd3-41db-97ff-3dfe43c86ebc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.785772] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1107.785772] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5260e428-1d65-84f3-76ae-e66c89a01623" [ 1107.785772] env[63538]: _type = "Task" [ 1107.785772] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.794327] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5260e428-1d65-84f3-76ae-e66c89a01623, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.827144] env[63538]: DEBUG nova.policy [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd6de144ccc4498aa90ae01ca7a0f6f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d6954a5254f441ca256c85330297cef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1107.863076] env[63538]: DEBUG oslo_vmware.api [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Task: {'id': task-5101684, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192258} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.863351] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1107.863543] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1107.863727] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1107.863905] env[63538]: INFO nova.compute.manager [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1107.864185] env[63538]: DEBUG oslo.service.loopingcall [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1107.864419] env[63538]: DEBUG nova.compute.manager [-] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1107.864521] env[63538]: DEBUG nova.network.neutron [-] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1108.037552] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] There are 48 instances to clean {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11326}} [ 1108.037755] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 0c19d662-4ae0-4ec9-93b4-9bd45822ed92] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1108.106420] env[63538]: DEBUG nova.compute.manager [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Found 3 images (rotation: 2) {{(pid=63538) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1108.106752] env[63538]: DEBUG nova.compute.manager [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Rotating out 1 backups {{(pid=63538) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4563}} [ 1108.107129] env[63538]: DEBUG nova.compute.manager [None req-ea93ebf4-4705-44f7-80d8-7c2bd74331ca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Deleting image 2e0382b2-f42e-4c8c-8f41-ce9c70949ae8 {{(pid=63538) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4568}} [ 1108.109620] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.109818] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.109955] env[63538]: DEBUG nova.network.neutron [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1108.133397] env[63538]: DEBUG oslo_concurrency.lockutils [req-b9c7d92d-81e4-49b3-80b1-f9d6c363a241 req-4ee93385-8708-44d0-b2bd-2313e79665f2 service nova] Releasing lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.258900] env[63538]: DEBUG nova.compute.manager [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1108.306723] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5260e428-1d65-84f3-76ae-e66c89a01623, 'name': SearchDatastore_Task, 'duration_secs': 0.012035} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.308310] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-356b44aa-2613-4e3e-afe8-63cb59bd201f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.322474] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1108.322474] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5262c429-d0c7-fbea-855a-decf2d9d95b4" [ 1108.322474] env[63538]: _type = "Task" [ 1108.322474] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.336136] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5262c429-d0c7-fbea-855a-decf2d9d95b4, 'name': SearchDatastore_Task, 'duration_secs': 0.012176} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.339583] env[63538]: DEBUG nova.network.neutron [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Successfully created port: c8b86754-970c-4f8a-b3fb-ec8fb42d3863 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1108.341450] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.341726] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 4387a3ec-0f0b-4917-97f3-08c737bee4e7/4387a3ec-0f0b-4917-97f3-08c737bee4e7.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1108.342238] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d4336db-1586-4cc2-9e29-70be97b15b00 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.350808] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1108.350808] env[63538]: value = "task-5101685" [ 1108.350808] env[63538]: _type = "Task" [ 1108.350808] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.362103] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101685, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.488530] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff5d2d5-878f-4fa5-8be8-439f5392c66f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.496279] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0182c9-c7aa-4cef-9591-e175d5d8fbe0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.528210] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02498192-3b66-420f-9e3a-d0a0894324db {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.537045] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a343a871-0464-4341-8fc5-d435146064c3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.541585] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: cbd40984-29b6-4ed9-8c87-9fd4c80f6f13] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1108.554137] env[63538]: DEBUG nova.compute.provider_tree [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.684833] env[63538]: DEBUG nova.network.neutron [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1108.708316] env[63538]: DEBUG oslo_concurrency.lockutils [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.708545] env[63538]: DEBUG oslo_concurrency.lockutils [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.708823] env[63538]: DEBUG nova.network.neutron [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1108.749942] env[63538]: DEBUG nova.network.neutron [-] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.866708] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101685, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.959652] env[63538]: DEBUG nova.network.neutron [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Updating instance_info_cache with network_info: [{"id": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "address": "fa:16:3e:ce:c8:c1", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap319cec1c-49", "ovs_interfaceid": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.045178] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 
5ef5fe70-fed9-4b3d-9d43-f01cf628d9af] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1109.057412] env[63538]: DEBUG nova.scheduler.client.report [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1109.144492] env[63538]: DEBUG nova.compute.manager [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Received event network-vif-plugged-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1109.144810] env[63538]: DEBUG oslo_concurrency.lockutils [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] Acquiring lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.144901] env[63538]: DEBUG oslo_concurrency.lockutils [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.145094] env[63538]: DEBUG oslo_concurrency.lockutils [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.145328] env[63538]: DEBUG nova.compute.manager [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] No waiting events found dispatching network-vif-plugged-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1109.145452] env[63538]: WARNING nova.compute.manager [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Received unexpected event network-vif-plugged-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 for instance with vm_state building and task_state spawning. 
[ 1109.145864] env[63538]: DEBUG nova.compute.manager [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Received event network-changed-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1109.145985] env[63538]: DEBUG nova.compute.manager [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Refreshing instance network info cache due to event network-changed-319cec1c-49eb-43a4-a9ec-6b74a507b6d6. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1109.146170] env[63538]: DEBUG oslo_concurrency.lockutils [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] Acquiring lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.252838] env[63538]: INFO nova.compute.manager [-] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Took 1.39 seconds to deallocate network for instance. [ 1109.272851] env[63538]: DEBUG nova.compute.manager [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1109.303287] env[63538]: DEBUG nova.virt.hardware [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1109.306221] env[63538]: DEBUG nova.virt.hardware [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1109.306221] env[63538]: DEBUG nova.virt.hardware [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1109.306221] env[63538]: DEBUG nova.virt.hardware [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor pref 0:0:0 {{(pid=63538) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1109.306221] env[63538]: DEBUG nova.virt.hardware [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1109.306221] env[63538]: DEBUG nova.virt.hardware [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1109.306221] env[63538]: DEBUG nova.virt.hardware [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1109.306221] env[63538]: DEBUG nova.virt.hardware [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1109.306221] env[63538]: DEBUG nova.virt.hardware [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1109.306221] env[63538]: DEBUG nova.virt.hardware [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1109.306221] env[63538]: DEBUG nova.virt.hardware [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1109.306221] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1bcf97-2e51-49b0-91a1-4b8c6839e328 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.319499] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e5da1f-5fb3-4afd-9dba-12258327a6d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.367803] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101685, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528555} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.368169] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 4387a3ec-0f0b-4917-97f3-08c737bee4e7/4387a3ec-0f0b-4917-97f3-08c737bee4e7.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1109.368315] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1109.368662] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c1702e4-4db6-46f0-86e6-14607156b351 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.377564] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1109.377564] env[63538]: value = "task-5101686" [ 1109.377564] env[63538]: _type = "Task" [ 1109.377564] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.390418] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101686, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.465893] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.466287] env[63538]: DEBUG nova.compute.manager [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Instance network_info: |[{"id": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "address": "fa:16:3e:ce:c8:c1", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap319cec1c-49", "ovs_interfaceid": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1109.466701] env[63538]: DEBUG oslo_concurrency.lockutils [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] Acquired lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.466898] env[63538]: DEBUG nova.network.neutron [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Refreshing network info cache for port 319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1109.468333] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:c8:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '319cec1c-49eb-43a4-a9ec-6b74a507b6d6', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1109.476704] env[63538]: DEBUG oslo.service.loopingcall [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 
tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1109.479770] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1109.480243] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c6e2714-7892-4693-97b4-5847fb1cb1d0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.503014] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1109.503014] env[63538]: value = "task-5101687" [ 1109.503014] env[63538]: _type = "Task" [ 1109.503014] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.518598] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101687, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.546735] env[63538]: DEBUG nova.network.neutron [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance_info_cache with network_info: [{"id": "da39877d-c305-4a70-8310-b2ad992f0cc7", "address": "fa:16:3e:e7:40:0a", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda39877d-c3", "ovs_interfaceid": "da39877d-c305-4a70-8310-b2ad992f0cc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.548582] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 7ee64b60-9b88-4710-a477-e984fa36a142] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1109.565502] env[63538]: DEBUG oslo_concurrency.lockutils [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1109.566110] env[63538]: DEBUG nova.compute.manager [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1109.569861] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.720s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.571397] env[63538]: INFO nova.compute.claims [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1109.733185] env[63538]: DEBUG nova.network.neutron [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Updated VIF entry in instance network info cache for port 319cec1c-49eb-43a4-a9ec-6b74a507b6d6. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1109.733603] env[63538]: DEBUG nova.network.neutron [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Updating instance_info_cache with network_info: [{"id": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "address": "fa:16:3e:ce:c8:c1", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap319cec1c-49", "ovs_interfaceid": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.760149] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.888042] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d 
tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101686, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092659} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.888388] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1109.889337] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d61ccc-4d52-41d8-ba5f-a63f5ce4c7df {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.913456] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 4387a3ec-0f0b-4917-97f3-08c737bee4e7/4387a3ec-0f0b-4917-97f3-08c737bee4e7.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1109.913803] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86f7dfd5-b827-42c6-ba44-03388c198ef9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.934701] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1109.934701] env[63538]: value = "task-5101688" [ 1109.934701] env[63538]: _type = "Task" [ 1109.934701] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.943301] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101688, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.014266] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101687, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.051653] env[63538]: DEBUG oslo_concurrency.lockutils [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1110.061022] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 7752c64f-693f-4cf3-951c-7ee0657f1682] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1110.068181] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30b01d4f-c8c8-43d1-a857-5df61394d9cf tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "3d80dc17-e330-4575-8e12-e06d8e76274a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.068483] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30b01d4f-c8c8-43d1-a857-5df61394d9cf tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.068685] env[63538]: DEBUG nova.compute.manager [None req-30b01d4f-c8c8-43d1-a857-5df61394d9cf tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1110.069729] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e852547-f114-46dc-a131-4c26d25a64c4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.081110] env[63538]: DEBUG nova.compute.utils [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1110.086404] env[63538]: DEBUG nova.compute.manager [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1110.086404] env[63538]: DEBUG nova.network.neutron [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1110.095798] env[63538]: DEBUG nova.compute.manager [None req-30b01d4f-c8c8-43d1-a857-5df61394d9cf tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63538) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1110.099580] env[63538]: DEBUG nova.objects.instance [None req-30b01d4f-c8c8-43d1-a857-5df61394d9cf tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'flavor' on Instance uuid 3d80dc17-e330-4575-8e12-e06d8e76274a {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.105549] env[63538]: DEBUG nova.network.neutron [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Successfully updated port: c8b86754-970c-4f8a-b3fb-ec8fb42d3863 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1110.214394] env[63538]: DEBUG nova.policy [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e2ba1b4ea16243eaa2c795869df1b653', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eaf2a7f857d342f1923d44141fa59cfe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1110.236553] env[63538]: DEBUG oslo_concurrency.lockutils [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] Releasing lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1110.236876] env[63538]: DEBUG nova.compute.manager [req-44fc17ba-09a1-4f23-a2dd-13f0fa52a5f1 req-b8986280-7878-48d4-b58f-ba7e1fdc8a25 service nova] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Received event network-vif-deleted-09315fe4-910e-4534-9382-1558fa660416 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1110.445725] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101688, 'name': ReconfigVM_Task, 'duration_secs': 0.347059} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.446153] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 4387a3ec-0f0b-4917-97f3-08c737bee4e7/4387a3ec-0f0b-4917-97f3-08c737bee4e7.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1110.446797] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7fe51818-d092-41b3-a398-67668b327ca0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.453205] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1110.453205] env[63538]: value = "task-5101689" [ 1110.453205] env[63538]: _type = "Task" [ 1110.453205] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.462991] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101689, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.513814] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101687, 'name': CreateVM_Task, 'duration_secs': 0.540976} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.514038] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1110.514844] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1110.515038] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.515390] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1110.515657] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87af149f-ed82-4029-9bf8-ed6927fa6a65 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.520754] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1110.520754] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f3c6c1-3520-7558-2904-d920d6a6acc8" [ 1110.520754] env[63538]: _type = "Task" [ 1110.520754] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.529459] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f3c6c1-3520-7558-2904-d920d6a6acc8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.571785] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Acquiring lock "6257bf5c-8a1c-4204-9605-cc07491e14ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.572112] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Lock "6257bf5c-8a1c-4204-9605-cc07491e14ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.572356] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Acquiring lock "6257bf5c-8a1c-4204-9605-cc07491e14ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.572628] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Lock "6257bf5c-8a1c-4204-9605-cc07491e14ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.572893] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Lock "6257bf5c-8a1c-4204-9605-cc07491e14ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.576479] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: fdc2c7e7-e42d-4c9a-8c6c-0ea6e3ddb4c9] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1110.582220] env[63538]: INFO nova.compute.manager [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Terminating instance [ 1110.584183] env[63538]: DEBUG nova.compute.manager [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1110.584388] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1110.585269] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ff56fd-f5f6-4e99-9ab8-be8c1dc03736 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.590151] env[63538]: DEBUG nova.compute.manager [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1110.599596] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92860f85-9bac-49ef-aae4-98a6d2cf52c6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.606886] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1110.607864] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-30b01d4f-c8c8-43d1-a857-5df61394d9cf tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1110.608481] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-680f6431-6d1e-43ce-b092-f0fe604fc66e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.610123] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-009213f3-7b73-4e91-8dd8-8b04aebb69bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.627985] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1110.628170] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.628325] env[63538]: DEBUG nova.network.neutron [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] 
[instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1110.635089] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee51023-3639-4cac-895a-fd2b793525cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.640479] env[63538]: DEBUG oslo_vmware.api [None req-30b01d4f-c8c8-43d1-a857-5df61394d9cf tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1110.640479] env[63538]: value = "task-5101691" [ 1110.640479] env[63538]: _type = "Task" [ 1110.640479] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.640978] env[63538]: DEBUG oslo_vmware.api [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Waiting for the task: (returnval){ [ 1110.640978] env[63538]: value = "task-5101690" [ 1110.640978] env[63538]: _type = "Task" [ 1110.640978] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.655172] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance '049518bd-d569-491a-8f79-6f0b78cf44b2' progress to 83 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1110.667602] env[63538]: DEBUG oslo_vmware.api [None req-30b01d4f-c8c8-43d1-a857-5df61394d9cf tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101691, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.671339] env[63538]: DEBUG oslo_vmware.api [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101690, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.821716] env[63538]: DEBUG nova.network.neutron [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Successfully created port: 3e8852b6-74d6-48df-920f-ee0169a7772e {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1110.849181] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37665fa7-eced-48b4-9401-5a3772a6837f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.857580] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff25c07a-d81a-4165-8e05-f3a32d7b8820 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.891430] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8559ec57-8071-4386-8f7d-6caff68cacf6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.900593] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfd31db-e909-47df-a08a-b42bb3d12bfe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.918611] env[63538]: DEBUG nova.compute.provider_tree [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.963487] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101689, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.031344] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f3c6c1-3520-7558-2904-d920d6a6acc8, 'name': SearchDatastore_Task, 'duration_secs': 0.070129} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.031760] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1111.032143] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1111.032526] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.032717] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.032917] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1111.033645] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-016312bb-56ae-4d95-97ea-66d2d84a7fa1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.049735] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1111.049949] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1111.050729] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-760e3589-79c5-4d99-ac41-e90dcb3c9bb5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.057014] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1111.057014] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5299a27d-7cce-711a-b289-69f9865ce4d7" [ 1111.057014] env[63538]: _type = "Task" [ 1111.057014] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.067026] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5299a27d-7cce-711a-b289-69f9865ce4d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.084854] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 148790a7-0a35-4d26-ae9f-6f954a161c88] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1111.097329] env[63538]: INFO nova.virt.block_device [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Booting with volume f66d7fd1-7450-49dd-aedf-14fdf30f2e90 at /dev/sda [ 1111.135562] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1dfb65f9-b3c7-4cd9-8ad9-55c46a083c5e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.155271] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266dff33-7e96-44fc-8fec-cfc8393885d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.182956] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1111.183392] env[63538]: DEBUG oslo_vmware.api [None req-30b01d4f-c8c8-43d1-a857-5df61394d9cf tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101691, 'name': PowerOffVM_Task, 'duration_secs': 0.274548} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.183716] env[63538]: DEBUG oslo_vmware.api [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101690, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.185757] env[63538]: DEBUG nova.network.neutron [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1111.188139] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c350b3dd-c3fd-45a5-a712-bf749d2d118c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.190743] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-30b01d4f-c8c8-43d1-a857-5df61394d9cf tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1111.190911] env[63538]: DEBUG nova.compute.manager [None req-30b01d4f-c8c8-43d1-a857-5df61394d9cf tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1111.192090] env[63538]: DEBUG nova.compute.manager [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Received event network-vif-plugged-c8b86754-970c-4f8a-b3fb-ec8fb42d3863 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1111.192309] env[63538]: DEBUG oslo_concurrency.lockutils [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] Acquiring lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.192521] env[63538]: DEBUG oslo_concurrency.lockutils [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] Lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.192697] env[63538]: DEBUG oslo_concurrency.lockutils [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] Lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.192871] env[63538]: DEBUG nova.compute.manager [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] No waiting events found dispatching network-vif-plugged-c8b86754-970c-4f8a-b3fb-ec8fb42d3863 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1111.193049] env[63538]: WARNING nova.compute.manager [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] 
[instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Received unexpected event network-vif-plugged-c8b86754-970c-4f8a-b3fb-ec8fb42d3863 for instance with vm_state building and task_state spawning. [ 1111.193220] env[63538]: DEBUG nova.compute.manager [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Received event network-changed-c8b86754-970c-4f8a-b3fb-ec8fb42d3863 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1111.193378] env[63538]: DEBUG nova.compute.manager [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Refreshing instance network info cache due to event network-changed-c8b86754-970c-4f8a-b3fb-ec8fb42d3863. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1111.193551] env[63538]: DEBUG oslo_concurrency.lockutils [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] Acquiring lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.194759] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0dfb9f-318f-41b1-b26c-1f3f46d0e421 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.217430] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1111.217430] env[63538]: value = "task-5101692" [ 1111.217430] env[63538]: _type = "Task" [ 1111.217430] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.217719] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d18f8491-c682-4e52-987e-943532c46f37 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.235846] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101692, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.241473] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ef995a-0e1b-41ca-8bcf-cf80a762f2a1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.280169] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4c7090-1c47-4abc-895c-8db6c7e405cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.289302] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20993e02-f1fe-450c-95d6-f914fdb2c0f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.306844] env[63538]: DEBUG nova.virt.block_device [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Updating existing volume attachment record: c348083d-f8d2-452b-8be5-9129cc137333 {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1111.421736] env[63538]: DEBUG nova.scheduler.client.report [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1111.466492] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101689, 'name': Rename_Task, 'duration_secs': 0.992904} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.467333] env[63538]: DEBUG nova.network.neutron [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance_info_cache with network_info: [{"id": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "address": "fa:16:3e:e0:99:90", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b86754-97", "ovs_interfaceid": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.468582] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1111.468912] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c32edf3-09e8-44c2-b02f-6f3e217e8ca8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.477727] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1111.477727] env[63538]: value = "task-5101693" [ 1111.477727] env[63538]: _type = "Task" [ 1111.477727] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.489343] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101693, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.567976] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5299a27d-7cce-711a-b289-69f9865ce4d7, 'name': SearchDatastore_Task, 'duration_secs': 0.013839} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.568786] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f4a81f2-3478-434e-bbe2-6a836f902b09 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.575197] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1111.575197] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ddb187-f559-62f2-e0e6-5d0540b6d2b8" [ 1111.575197] env[63538]: _type = "Task" [ 1111.575197] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.583689] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ddb187-f559-62f2-e0e6-5d0540b6d2b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.588241] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 0a0d0372-dede-4df0-bb9e-231e8a5b3742] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1111.652494] env[63538]: DEBUG oslo_vmware.api [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101690, 'name': PowerOffVM_Task, 'duration_secs': 0.824007} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.652801] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1111.652978] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1111.653256] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb038c74-0000-4007-a753-3ae966a1c7f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.708385] env[63538]: DEBUG oslo_concurrency.lockutils [None req-30b01d4f-c8c8-43d1-a857-5df61394d9cf tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.640s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.717263] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1111.717633] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1111.717898] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Deleting the datastore file [datastore1] 6257bf5c-8a1c-4204-9605-cc07491e14ea {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1111.718263] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d7939cc-e602-4f69-a9da-855bb673059e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.734499] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101692, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.736259] env[63538]: DEBUG oslo_vmware.api [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Waiting for the task: (returnval){ [ 1111.736259] env[63538]: value = "task-5101695" [ 1111.736259] env[63538]: _type = "Task" [ 1111.736259] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.746548] env[63538]: DEBUG oslo_vmware.api [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101695, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.929183] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.359s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.929952] env[63538]: DEBUG nova.compute.manager [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1111.933182] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.173s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.933512] env[63538]: DEBUG nova.objects.instance [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lazy-loading 'resources' on Instance uuid 17350ce4-555b-4f00-9a75-de32a4453141 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1111.971601] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1111.971952] env[63538]: DEBUG nova.compute.manager [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Instance network_info: |[{"id": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "address": "fa:16:3e:e0:99:90", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b86754-97", "ovs_interfaceid": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1111.972548] env[63538]: DEBUG oslo_concurrency.lockutils [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] Acquired lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.972762] env[63538]: DEBUG nova.network.neutron [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Refreshing network info cache for port c8b86754-970c-4f8a-b3fb-ec8fb42d3863 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1111.974500] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:99:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f39e3b37-7906-4bbc-820e-ceac74e4d827', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8b86754-970c-4f8a-b3fb-ec8fb42d3863', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1111.982692] env[63538]: DEBUG oslo.service.loopingcall [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1111.983701] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1111.988043] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e780d8a8-9a95-4293-87ec-dcece250cfee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.021491] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101693, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.023116] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1112.023116] env[63538]: value = "task-5101696" [ 1112.023116] env[63538]: _type = "Task" [ 1112.023116] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.033289] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101696, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.090020] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ddb187-f559-62f2-e0e6-5d0540b6d2b8, 'name': SearchDatastore_Task, 'duration_secs': 0.014349} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.090020] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1112.090020] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] d00151c1-ca34-4c57-9ed2-74d506a0cffb/d00151c1-ca34-4c57-9ed2-74d506a0cffb.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1112.090366] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-536b9c5c-d880-4347-a4cb-dd8875b0f3c4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.092748] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 144df97e-f47b-4ead-8243-345d98b9f3e6] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1112.100871] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1112.100871] env[63538]: value = "task-5101697" [ 1112.100871] env[63538]: _type = "Task" [ 1112.100871] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.111782] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101697, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.233031] env[63538]: DEBUG oslo_vmware.api [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101692, 'name': PowerOnVM_Task, 'duration_secs': 0.683632} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.233408] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1112.233665] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-19611634-36ad-40de-a1d3-5984d728d609 tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance '049518bd-d569-491a-8f79-6f0b78cf44b2' progress to 100 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1112.247689] env[63538]: DEBUG oslo_vmware.api [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Task: {'id': task-5101695, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287006} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.248017] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1112.248350] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1112.248600] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1112.248791] env[63538]: INFO nova.compute.manager [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1112.248981] env[63538]: DEBUG oslo.service.loopingcall [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1112.249219] env[63538]: DEBUG nova.compute.manager [-] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1112.249318] env[63538]: DEBUG nova.network.neutron [-] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1112.438553] env[63538]: DEBUG nova.compute.utils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1112.444845] env[63538]: DEBUG nova.compute.manager [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Not allocating networking since 'none' was specified. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1112.501598] env[63538]: DEBUG oslo_vmware.api [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101693, 'name': PowerOnVM_Task, 'duration_secs': 0.59452} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.502835] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1112.503356] env[63538]: INFO nova.compute.manager [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Took 7.87 seconds to spawn the instance on the hypervisor. [ 1112.503719] env[63538]: DEBUG nova.compute.manager [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1112.504973] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cfed8a-29ea-4994-910f-5a7d23fb5bf6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.544018] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101696, 'name': CreateVM_Task, 'duration_secs': 0.384602} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.544722] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1112.546524] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1112.547323] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.550756] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1112.550756] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a34bf244-338c-4df2-8a3a-ef2c5d4cde14 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.559763] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1112.559763] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528224d3-c808-1146-00ce-635e7076804d" [ 1112.559763] env[63538]: _type = "Task" [ 1112.559763] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.574988] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528224d3-c808-1146-00ce-635e7076804d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.597789] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 4f81dc4e-2092-4a2c-a511-589d47d118b6] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1112.615024] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101697, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.649243] env[63538]: DEBUG nova.compute.manager [req-a3b64118-6007-43dc-9518-50eaab913aa3 req-016f84f2-27e9-4f6c-9cb4-c2885ca1dd90 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Received event network-vif-plugged-3e8852b6-74d6-48df-920f-ee0169a7772e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1112.649243] env[63538]: DEBUG oslo_concurrency.lockutils [req-a3b64118-6007-43dc-9518-50eaab913aa3 req-016f84f2-27e9-4f6c-9cb4-c2885ca1dd90 service nova] Acquiring lock "aaf52cad-86fd-42df-8ee3-13724e3f5e94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.650685] env[63538]: DEBUG oslo_concurrency.lockutils [req-a3b64118-6007-43dc-9518-50eaab913aa3 req-016f84f2-27e9-4f6c-9cb4-c2885ca1dd90 service nova] Lock "aaf52cad-86fd-42df-8ee3-13724e3f5e94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.651034] env[63538]: DEBUG oslo_concurrency.lockutils [req-a3b64118-6007-43dc-9518-50eaab913aa3 req-016f84f2-27e9-4f6c-9cb4-c2885ca1dd90 service nova] Lock "aaf52cad-86fd-42df-8ee3-13724e3f5e94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.651373] env[63538]: DEBUG nova.compute.manager [req-a3b64118-6007-43dc-9518-50eaab913aa3 req-016f84f2-27e9-4f6c-9cb4-c2885ca1dd90 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] No waiting events found dispatching network-vif-plugged-3e8852b6-74d6-48df-920f-ee0169a7772e {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1112.651670] env[63538]: WARNING nova.compute.manager [req-a3b64118-6007-43dc-9518-50eaab913aa3 req-016f84f2-27e9-4f6c-9cb4-c2885ca1dd90 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Received unexpected event network-vif-plugged-3e8852b6-74d6-48df-920f-ee0169a7772e for instance with vm_state building and task_state block_device_mapping. 
[ 1112.716413] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6695df6a-0000-4350-9a6d-bf55e657d16e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.723806] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f8eb43-b2b2-4cac-8ee4-23838ffd2b60 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.762500] env[63538]: DEBUG nova.network.neutron [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Successfully updated port: 3e8852b6-74d6-48df-920f-ee0169a7772e {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1112.764299] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afcb9bae-3f61-4ea1-ab6f-7ceccb5c77a7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.775026] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf77446-cd4c-467d-bea0-8dab1a801768 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.792636] env[63538]: DEBUG nova.compute.provider_tree [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.950556] env[63538]: DEBUG nova.compute.manager [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1113.032581] env[63538]: DEBUG nova.network.neutron [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updated VIF entry in instance network info cache for port c8b86754-970c-4f8a-b3fb-ec8fb42d3863. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1113.032581] env[63538]: DEBUG nova.network.neutron [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance_info_cache with network_info: [{"id": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "address": "fa:16:3e:e0:99:90", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b86754-97", "ovs_interfaceid": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.040804] env[63538]: INFO nova.compute.manager [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Took 15.02 seconds to build instance. [ 1113.074055] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]528224d3-c808-1146-00ce-635e7076804d, 'name': SearchDatastore_Task, 'duration_secs': 0.047934} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.074055] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1113.074347] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1113.074504] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1113.074638] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.074785] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1113.075078] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-492b65b3-0519-4243-98af-4eb5142305fb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.085371] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1113.085609] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1113.086313] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92f0bf5c-ce00-44c0-9ad5-3015993826f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.092284] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1113.092284] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b7f656-893d-46fa-f8e6-4e84fd5d4adc" [ 1113.092284] env[63538]: _type = "Task" [ 1113.092284] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.100629] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 431a67e6-b90d-4930-9a86-7c49d1022ddc] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1113.102379] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b7f656-893d-46fa-f8e6-4e84fd5d4adc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.110995] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101697, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598196} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.111329] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] d00151c1-ca34-4c57-9ed2-74d506a0cffb/d00151c1-ca34-4c57-9ed2-74d506a0cffb.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1113.111518] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1113.111765] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b292fe8e-74d9-4ef8-b01d-9f2972a81515 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.118676] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1113.118676] env[63538]: value = "task-5101698" [ 1113.118676] env[63538]: _type = "Task" [ 1113.118676] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.129019] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101698, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.207240] env[63538]: DEBUG nova.compute.manager [req-2f31db57-0edb-4ee4-90b9-2481094791a5 req-7e43a116-ce13-4d97-b497-8817a9ec60b1 service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Received event network-vif-deleted-8826b736-8295-4d09-8211-ccda578b133e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1113.207240] env[63538]: INFO nova.compute.manager [req-2f31db57-0edb-4ee4-90b9-2481094791a5 req-7e43a116-ce13-4d97-b497-8817a9ec60b1 service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Neutron deleted interface 8826b736-8295-4d09-8211-ccda578b133e; detaching it from the instance and deleting it from the info cache [ 1113.207240] env[63538]: DEBUG nova.network.neutron [req-2f31db57-0edb-4ee4-90b9-2481094791a5 req-7e43a116-ce13-4d97-b497-8817a9ec60b1 service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.207841] env[63538]: WARNING oslo_messaging._drivers.amqpdriver [req-2f31db57-0edb-4ee4-90b9-2481094791a5 req-7e43a116-ce13-4d97-b497-8817a9ec60b1 service nova] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 1113.271025] env[63538]: DEBUG oslo_concurrency.lockutils [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Acquiring lock "refresh_cache-aaf52cad-86fd-42df-8ee3-13724e3f5e94" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1113.271025] env[63538]: DEBUG oslo_concurrency.lockutils [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Acquired lock "refresh_cache-aaf52cad-86fd-42df-8ee3-13724e3f5e94" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.271025] env[63538]: DEBUG nova.network.neutron [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1113.297425] env[63538]: DEBUG nova.scheduler.client.report [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1113.322014] env[63538]: DEBUG nova.compute.manager [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Stashing vm_state: stopped {{(pid=63538) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 1113.469944] env[63538]: DEBUG nova.compute.manager [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1113.469944] env[63538]: DEBUG nova.virt.hardware [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1113.469944] env[63538]: DEBUG nova.virt.hardware [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1113.469944] env[63538]: DEBUG nova.virt.hardware [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1113.469944] env[63538]: DEBUG nova.virt.hardware [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1113.470498] env[63538]: DEBUG nova.virt.hardware [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1113.470498] env[63538]: DEBUG nova.virt.hardware [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1113.470820] env[63538]: DEBUG nova.virt.hardware [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1113.470820] env[63538]: DEBUG nova.virt.hardware [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1113.470942] env[63538]: DEBUG nova.virt.hardware [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 
tempest-ServerActionsV293TestJSON-1816572243-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1113.470992] env[63538]: DEBUG nova.virt.hardware [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1113.471190] env[63538]: DEBUG nova.virt.hardware [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1113.472439] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772d9dda-fb89-4879-a077-1844e6bfe537 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.481280] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59235949-098e-403a-9fab-a69743a2f750 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.539383] env[63538]: DEBUG oslo_concurrency.lockutils [req-3b6ad79b-8d6a-415f-af60-8437cc5edd5b req-ccdf49e7-a661-4a6d-9b08-daee85ace3fe service nova] Releasing lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1113.543077] env[63538]: DEBUG oslo_concurrency.lockutils [None req-aeeb77d9-17f5-49c9-b1fa-bc1c729dd00d tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.543s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1113.603567] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 0339c969-ad97-47b1-8fab-ee595738d9df] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1113.605608] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b7f656-893d-46fa-f8e6-4e84fd5d4adc, 'name': SearchDatastore_Task, 'duration_secs': 0.00935} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.606675] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17f03386-ecb3-48a2-98b0-41fed57ba7da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.613385] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1113.613385] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5214f9be-ac9f-7dcb-cf3f-a09ba3748ce1" [ 1113.613385] env[63538]: _type = "Task" [ 1113.613385] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.622280] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5214f9be-ac9f-7dcb-cf3f-a09ba3748ce1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.631037] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101698, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.290366} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.632310] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1113.632310] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64de7e59-90b4-4a9e-a7f0-33d4efd16d4d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.655603] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] d00151c1-ca34-4c57-9ed2-74d506a0cffb/d00151c1-ca34-4c57-9ed2-74d506a0cffb.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1113.655912] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-205967c5-dfbe-493c-846a-eb921c27a444 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.670735] env[63538]: DEBUG nova.network.neutron [-] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.678754] env[63538]: DEBUG oslo_vmware.api [None 
req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1113.678754] env[63538]: value = "task-5101699" [ 1113.678754] env[63538]: _type = "Task" [ 1113.678754] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.688180] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101699, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.709913] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5bb0ae10-a1eb-4d8e-a796-27a9d39695de {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.722175] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852cee82-917e-4d14-807d-6fb08a104770 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.759296] env[63538]: DEBUG nova.compute.manager [req-2f31db57-0edb-4ee4-90b9-2481094791a5 req-7e43a116-ce13-4d97-b497-8817a9ec60b1 service nova] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Detach interface failed, port_id=8826b736-8295-4d09-8211-ccda578b133e, reason: Instance 6257bf5c-8a1c-4204-9605-cc07491e14ea could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1113.803061] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.870s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1113.833152] env[63538]: INFO nova.scheduler.client.report [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Deleted allocations for instance 17350ce4-555b-4f00-9a75-de32a4453141 [ 1113.839984] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1113.839984] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1113.842494] env[63538]: DEBUG nova.network.neutron [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: 
aaf52cad-86fd-42df-8ee3-13724e3f5e94] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1113.961025] env[63538]: DEBUG nova.compute.manager [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1113.995996] env[63538]: DEBUG nova.virt.hardware [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1113.996306] env[63538]: DEBUG nova.virt.hardware [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1113.996560] env[63538]: DEBUG nova.virt.hardware [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1113.996768] env[63538]: DEBUG nova.virt.hardware [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1113.996954] env[63538]: DEBUG nova.virt.hardware [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1113.997190] env[63538]: DEBUG nova.virt.hardware [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1113.997471] env[63538]: DEBUG nova.virt.hardware [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1113.997648] env[63538]: DEBUG nova.virt.hardware [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1113.997955] env[63538]: DEBUG nova.virt.hardware [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1113.998180] env[63538]: DEBUG nova.virt.hardware [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1113.998401] env[63538]: DEBUG nova.virt.hardware [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1113.999349] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ec7578-897e-41da-ba1a-97ec5542c696 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.008877] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f499c21-0b4e-4b47-b3bd-86d91350ff49 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.024079] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1114.030564] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Creating folder: Project (dae83eccacb742b7b4313a6c17477d5a). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1114.031534] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c14cb70-23c5-4327-beb2-77f68b1665e9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.044082] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Created folder: Project (dae83eccacb742b7b4313a6c17477d5a) in parent group-v992234. [ 1114.044321] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Creating folder: Instances. Parent ref: group-v992516. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1114.044578] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05ed9990-7b15-4a28-9cf3-dae7b3a39ccd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.055785] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Created folder: Instances in parent group-v992516. [ 1114.056270] env[63538]: DEBUG oslo.service.loopingcall [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1114.056412] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1114.056572] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24361c3e-96e7-44bc-ac07-e17901fc6313 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.084411] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1114.084411] env[63538]: value = "task-5101702" [ 1114.084411] env[63538]: _type = "Task" [ 1114.084411] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.099060] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101702, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.106868] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 209c5f46-9c63-4f55-bc75-bc2e4da989ac] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1114.128461] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5214f9be-ac9f-7dcb-cf3f-a09ba3748ce1, 'name': SearchDatastore_Task, 'duration_secs': 0.010671} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.128800] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.129323] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] e0d5a3b2-21e1-4de0-ac10-1a5687a60c10/e0d5a3b2-21e1-4de0-ac10-1a5687a60c10.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1114.129635] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70386437-665b-4cd3-9446-8354541bbf28 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.138812] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1114.138812] env[63538]: value = "task-5101703" [ 1114.138812] env[63538]: _type = "Task" [ 1114.138812] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.153659] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101703, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.174218] env[63538]: INFO nova.compute.manager [-] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Took 1.92 seconds to deallocate network for instance. 
[ 1114.185584] env[63538]: DEBUG nova.network.neutron [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Updating instance_info_cache with network_info: [{"id": "3e8852b6-74d6-48df-920f-ee0169a7772e", "address": "fa:16:3e:19:f8:87", "network": {"id": "8716f694-8f95-4130-a2ed-2d60a1849ebd", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-164096703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf2a7f857d342f1923d44141fa59cfe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e8852b6-74", "ovs_interfaceid": "3e8852b6-74d6-48df-920f-ee0169a7772e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.191693] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101699, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.341587] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9e63a1ca-ee49-44b7-8637-5c2d8f95e104 tempest-ServerDiskConfigTestJSON-1284737605 tempest-ServerDiskConfigTestJSON-1284737605-project-member] Lock "17350ce4-555b-4f00-9a75-de32a4453141" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.636s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.348458] env[63538]: INFO nova.compute.claims [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1114.599875] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101702, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.610635] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 6bc30d96-8056-421c-875b-c24488e5f595] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1114.650025] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101703, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.684626] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.690761] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101699, 'name': ReconfigVM_Task, 'duration_secs': 0.562197} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.691042] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Reconfigured VM instance instance-0000006a to attach disk [datastore1] d00151c1-ca34-4c57-9ed2-74d506a0cffb/d00151c1-ca34-4c57-9ed2-74d506a0cffb.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1114.691672] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7797949f-9c8a-4074-b1e0-5a45a479e0a8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.693540] env[63538]: DEBUG oslo_concurrency.lockutils [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Releasing lock "refresh_cache-aaf52cad-86fd-42df-8ee3-13724e3f5e94" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.693838] env[63538]: DEBUG nova.compute.manager [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Instance network_info: |[{"id": "3e8852b6-74d6-48df-920f-ee0169a7772e", "address": "fa:16:3e:19:f8:87", "network": {"id": "8716f694-8f95-4130-a2ed-2d60a1849ebd", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-164096703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf2a7f857d342f1923d44141fa59cfe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e8852b6-74", "ovs_interfaceid": "3e8852b6-74d6-48df-920f-ee0169a7772e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1114.694216] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:f8:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e8852b6-74d6-48df-920f-ee0169a7772e', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1114.702422] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Creating folder: Project (eaf2a7f857d342f1923d44141fa59cfe). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1114.703235] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a167c441-53cf-47a3-aa89-12e97b063a05 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.709394] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1114.709394] env[63538]: value = "task-5101704" [ 1114.709394] env[63538]: _type = "Task" [ 1114.709394] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.718749] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101704, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.719896] env[63538]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 1114.720056] env[63538]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63538) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1114.720552] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "049518bd-d569-491a-8f79-6f0b78cf44b2" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.720776] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "049518bd-d569-491a-8f79-6f0b78cf44b2" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.720970] env[63538]: DEBUG nova.compute.manager [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Going to confirm migration 6 {{(pid=63538) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1114.722366] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Folder already exists: Project (eaf2a7f857d342f1923d44141fa59cfe). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1114.725957] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Creating folder: Instances. Parent ref: group-v992503. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1114.725957] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51eca67c-9efb-4a26-a7d6-ea02c51186d3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.734153] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Created folder: Instances in parent group-v992503. [ 1114.734430] env[63538]: DEBUG oslo.service.loopingcall [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1114.734636] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1114.734849] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51229ea2-c193-4b68-ae22-7ac8b9526f00 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.756788] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1114.756788] env[63538]: value = "task-5101707" [ 1114.756788] env[63538]: _type = "Task" [ 1114.756788] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.766068] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101707, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.855408] env[63538]: INFO nova.compute.resource_tracker [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating resource usage from migration 86475eec-5cad-4cff-8794-87181adb12b6 [ 1114.969759] env[63538]: DEBUG nova.compute.manager [req-42177711-09c7-44f0-bbdc-446a4cf38dc3 req-7e3ce2e4-8ca7-41d7-a257-273f88042023 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Received event network-changed-3e8852b6-74d6-48df-920f-ee0169a7772e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1114.969963] env[63538]: DEBUG nova.compute.manager [req-42177711-09c7-44f0-bbdc-446a4cf38dc3 req-7e3ce2e4-8ca7-41d7-a257-273f88042023 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Refreshing instance network info cache due to event network-changed-3e8852b6-74d6-48df-920f-ee0169a7772e. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1114.970209] env[63538]: DEBUG oslo_concurrency.lockutils [req-42177711-09c7-44f0-bbdc-446a4cf38dc3 req-7e3ce2e4-8ca7-41d7-a257-273f88042023 service nova] Acquiring lock "refresh_cache-aaf52cad-86fd-42df-8ee3-13724e3f5e94" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1114.970361] env[63538]: DEBUG oslo_concurrency.lockutils [req-42177711-09c7-44f0-bbdc-446a4cf38dc3 req-7e3ce2e4-8ca7-41d7-a257-273f88042023 service nova] Acquired lock "refresh_cache-aaf52cad-86fd-42df-8ee3-13724e3f5e94" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.970767] env[63538]: DEBUG nova.network.neutron [req-42177711-09c7-44f0-bbdc-446a4cf38dc3 req-7e3ce2e4-8ca7-41d7-a257-273f88042023 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Refreshing network info cache for port 3e8852b6-74d6-48df-920f-ee0169a7772e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1115.080448] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73251910-ae68-45c8-a615-cd3ee973427e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.098078] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f873fb3-1092-4f03-a5d7-257cda72d1c2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.112927] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101702, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.140577] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: cf72ac3d-4051-428a-b5bc-7f28accb13c0] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1115.151039] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bb3e30-bdf4-4bff-8cb3-25efb739e121 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.162608] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101703, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523527} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.163718] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df43bc57-0aaf-4813-ab62-f9900e17c3c6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.168561] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] e0d5a3b2-21e1-4de0-ac10-1a5687a60c10/e0d5a3b2-21e1-4de0-ac10-1a5687a60c10.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1115.168561] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1115.168834] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5263e985-b18b-4e90-b769-1c00b0b55f86 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.183361] env[63538]: DEBUG nova.compute.provider_tree [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1115.186495] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1115.186495] env[63538]: value = "task-5101708" [ 1115.186495] env[63538]: _type = "Task" [ 1115.186495] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.195808] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101708, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.220459] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101704, 'name': Rename_Task, 'duration_secs': 0.176213} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.220746] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1115.221079] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-519f30b0-8794-4c3f-bce9-a622aafc386d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.229557] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1115.229557] env[63538]: value = "task-5101709" [ 1115.229557] env[63538]: _type = "Task" [ 1115.229557] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.238166] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101709, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.271609] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101707, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.283728] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1115.283897] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquired lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.284113] env[63538]: DEBUG nova.network.neutron [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1115.284399] env[63538]: DEBUG nova.objects.instance [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lazy-loading 'info_cache' on Instance uuid 049518bd-d569-491a-8f79-6f0b78cf44b2 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.493541] env[63538]: DEBUG nova.compute.manager [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Received event network-changed-dfaa4640-ae2a-444b-aa92-e24dd9eca692 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1115.493742] env[63538]: DEBUG nova.compute.manager [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Refreshing instance network info cache due to event network-changed-dfaa4640-ae2a-444b-aa92-e24dd9eca692. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1115.493960] env[63538]: DEBUG oslo_concurrency.lockutils [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] Acquiring lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1115.496592] env[63538]: DEBUG oslo_concurrency.lockutils [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] Acquired lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.496829] env[63538]: DEBUG nova.network.neutron [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Refreshing network info cache for port dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1115.606338] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101702, 'name': CreateVM_Task, 'duration_secs': 1.475522} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.606659] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1115.607149] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1115.607372] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.607701] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1115.608126] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97065b29-a019-430b-a297-980b3d5ccf18 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.613155] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f 
tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1115.613155] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52eb6933-1496-623d-e168-1dc3132a288e" [ 1115.613155] env[63538]: _type = "Task" [ 1115.613155] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.622087] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52eb6933-1496-623d-e168-1dc3132a288e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.647297] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 8097cb1c-bbba-45a8-be81-64d38decb1df] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1115.688471] env[63538]: DEBUG nova.scheduler.client.report [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1115.702946] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101708, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070206} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.702946] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1115.704300] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29586a4e-31d8-453e-84e5-099b5887e203 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.733814] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] e0d5a3b2-21e1-4de0-ac10-1a5687a60c10/e0d5a3b2-21e1-4de0-ac10-1a5687a60c10.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1115.734406] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5bf80a23-d77b-415a-bece-d0018171e690 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.779144] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101709, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.785872] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101707, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.786351] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1115.786351] env[63538]: value = "task-5101710" [ 1115.786351] env[63538]: _type = "Task" [ 1115.786351] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.797462] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101710, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.824305] env[63538]: DEBUG nova.network.neutron [req-42177711-09c7-44f0-bbdc-446a4cf38dc3 req-7e3ce2e4-8ca7-41d7-a257-273f88042023 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Updated VIF entry in instance network info cache for port 3e8852b6-74d6-48df-920f-ee0169a7772e. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1115.824305] env[63538]: DEBUG nova.network.neutron [req-42177711-09c7-44f0-bbdc-446a4cf38dc3 req-7e3ce2e4-8ca7-41d7-a257-273f88042023 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Updating instance_info_cache with network_info: [{"id": "3e8852b6-74d6-48df-920f-ee0169a7772e", "address": "fa:16:3e:19:f8:87", "network": {"id": "8716f694-8f95-4130-a2ed-2d60a1849ebd", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-164096703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf2a7f857d342f1923d44141fa59cfe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e8852b6-74", "ovs_interfaceid": "3e8852b6-74d6-48df-920f-ee0169a7772e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.128471] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52eb6933-1496-623d-e168-1dc3132a288e, 'name': SearchDatastore_Task, 'duration_secs': 0.012162} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.128471] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1116.128471] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1116.128471] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1116.128471] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.128471] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1116.129494] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a88251d-8421-4056-9e8b-f5206f207e64 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.143316] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1116.143316] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1116.143316] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f82affb6-e210-40a7-8eb1-78c0aeb7e984 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.149745] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1116.149745] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aadd42-7f00-0ee2-17df-777a3a82986d" [ 1116.149745] env[63538]: _type = "Task" [ 1116.149745] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.150754] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 8ed0bd15-71fc-435e-9e4a-90b023ad8a79] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1116.163594] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aadd42-7f00-0ee2-17df-777a3a82986d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.201997] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.362s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.202249] env[63538]: INFO nova.compute.manager [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Migrating [ 1116.210059] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.525s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.213760] env[63538]: DEBUG nova.objects.instance [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Lazy-loading 'resources' on Instance uuid 6257bf5c-8a1c-4204-9605-cc07491e14ea {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1116.249802] env[63538]: DEBUG oslo_vmware.api [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101709, 'name': PowerOnVM_Task, 'duration_secs': 0.987755} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.250088] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1116.250313] env[63538]: INFO nova.compute.manager [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Took 9.33 seconds to spawn the instance on the hypervisor. [ 1116.250501] env[63538]: DEBUG nova.compute.manager [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1116.251846] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91761f85-82ef-4def-9515-ff65be945c08 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.279021] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101707, 'name': CreateVM_Task, 'duration_secs': 1.221603} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.279263] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1116.280794] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992508', 'volume_id': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'name': 'volume-f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'aaf52cad-86fd-42df-8ee3-13724e3f5e94', 'attached_at': '', 'detached_at': '', 'volume_id': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'serial': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90'}, 'delete_on_termination': True, 'attachment_id': 'c348083d-f8d2-452b-8be5-9129cc137333', 'guest_format': None, 'mount_device': '/dev/sda', 'device_type': None, 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=63538) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1116.281057] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Root volume attach. 
Driver type: vmdk {{(pid=63538) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1116.281951] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8599039b-6866-4e30-8c27-7a81c5ed3123 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.298589] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c7a3f5-261f-4f7a-b275-d105d56b70cc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.305943] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101710, 'name': ReconfigVM_Task, 'duration_secs': 0.430479} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.307058] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Reconfigured VM instance instance-0000006b to attach disk [datastore2] e0d5a3b2-21e1-4de0-ac10-1a5687a60c10/e0d5a3b2-21e1-4de0-ac10-1a5687a60c10.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1116.308746] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d36a501f-8209-4d95-8ec3-b8b6af052ba3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.313677] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9336e4-0742-460e-bb23-25d03e7114cb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.322778] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1116.322778] env[63538]: value = "task-5101711" [ 1116.322778] env[63538]: _type = "Task" [ 1116.322778] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.329381] env[63538]: DEBUG oslo_concurrency.lockutils [req-42177711-09c7-44f0-bbdc-446a4cf38dc3 req-7e3ce2e4-8ca7-41d7-a257-273f88042023 service nova] Releasing lock "refresh_cache-aaf52cad-86fd-42df-8ee3-13724e3f5e94" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1116.331052] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-4eea8e8a-221b-4447-b212-531d125763d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.338249] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101711, 'name': Rename_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.346127] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Waiting for the task: (returnval){ [ 1116.346127] env[63538]: value = "task-5101712" [ 1116.346127] env[63538]: _type = "Task" [ 1116.346127] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.357307] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101712, 'name': RelocateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.661175] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 90e56075-0d77-467f-90be-913315b63b33] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1116.672670] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aadd42-7f00-0ee2-17df-777a3a82986d, 'name': SearchDatastore_Task, 'duration_secs': 0.013842} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.673788] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-356ab35c-5ebb-4a42-81fe-dd8170e00431 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.681365] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1116.681365] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52daf034-7701-5cb7-ad9d-1f94b9531aa0" [ 1116.681365] env[63538]: _type = "Task" [ 1116.681365] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.694058] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52daf034-7701-5cb7-ad9d-1f94b9531aa0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.709567] env[63538]: DEBUG nova.network.neutron [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updated VIF entry in instance network info cache for port dfaa4640-ae2a-444b-aa92-e24dd9eca692. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1116.709567] env[63538]: DEBUG nova.network.neutron [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updating instance_info_cache with network_info: [{"id": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "address": "fa:16:3e:f0:24:40", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfaa4640-ae", "ovs_interfaceid": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.734164] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1116.734164] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.734164] env[63538]: DEBUG nova.network.neutron [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1116.775621] env[63538]: INFO nova.compute.manager [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Took 16.19 seconds to build instance. [ 1116.833776] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101711, 'name': Rename_Task, 'duration_secs': 0.222749} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.834376] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1116.835205] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-172aa25f-1d25-4f69-a804-ad05fbbf8851 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.853480] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1116.853480] env[63538]: value = "task-5101713" [ 1116.853480] env[63538]: _type = "Task" [ 1116.853480] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.866746] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101712, 'name': RelocateVM_Task} progress is 35%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.874730] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101713, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.934756] env[63538]: DEBUG nova.network.neutron [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance_info_cache with network_info: [{"id": "da39877d-c305-4a70-8310-b2ad992f0cc7", "address": "fa:16:3e:e7:40:0a", "network": {"id": "01450801-f549-4844-80bd-208ec405c65f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684310085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "492427e54e1048f292dab2abdac71af5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32463b6d-4569-4755-8a29-873a028690a7", "external-id": "nsx-vlan-transportzone-349", "segmentation_id": 349, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda39877d-c3", "ovs_interfaceid": "da39877d-c305-4a70-8310-b2ad992f0cc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.991120] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c183fa-e032-46ab-b5d3-c0882683e53a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.003194] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cadb88c-58e6-4b11-8dcd-7b8257b2f50d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.042134] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c03c69d-127b-4883-868b-699c13c2969d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.052851] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c4c9f4-cc88-4f33-ac0e-76bc96053e17 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.070866] env[63538]: DEBUG nova.compute.provider_tree [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.166739] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 4ec5d3a2-8b29-4074-b323-f94704043b8b] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1117.194139] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 
tempest-ServerShowV254Test-503485431-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52daf034-7701-5cb7-ad9d-1f94b9531aa0, 'name': SearchDatastore_Task, 'duration_secs': 0.011797} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.194139] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.194658] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 12beddad-1f19-4cee-b885-3079e3603ba3/12beddad-1f19-4cee-b885-3079e3603ba3.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1117.194746] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df357a30-6eea-4cde-83ab-dcb3d9774211 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.204012] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1117.204012] env[63538]: value = "task-5101714" [ 1117.204012] env[63538]: _type = "Task" [ 1117.204012] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.211053] env[63538]: DEBUG oslo_concurrency.lockutils [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] Releasing lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.211443] env[63538]: DEBUG nova.compute.manager [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Received event network-changed-f31eeedc-851d-457f-8464-c8562fdeaf87 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1117.211749] env[63538]: DEBUG nova.compute.manager [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Refreshing instance network info cache due to event network-changed-f31eeedc-851d-457f-8464-c8562fdeaf87. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1117.211921] env[63538]: DEBUG oslo_concurrency.lockutils [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] Acquiring lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1117.212099] env[63538]: DEBUG oslo_concurrency.lockutils [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] Acquired lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.212332] env[63538]: DEBUG nova.network.neutron [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Refreshing network info cache for port f31eeedc-851d-457f-8464-c8562fdeaf87 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1117.223937] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101714, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.278675] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bf294917-e938-4c78-9f32-d7dc9d794ebf tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.700s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.364024] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101712, 'name': RelocateVM_Task} progress is 49%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.371505] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101713, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.435699] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Releasing lock "refresh_cache-049518bd-d569-491a-8f79-6f0b78cf44b2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.436056] env[63538]: DEBUG nova.objects.instance [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lazy-loading 'migration_context' on Instance uuid 049518bd-d569-491a-8f79-6f0b78cf44b2 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1117.574781] env[63538]: DEBUG nova.scheduler.client.report [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1117.673706] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: f5d92749-04d6-4935-8dc6-afb692222df0] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1117.720538] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101714, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.819830] env[63538]: DEBUG nova.compute.manager [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Received event network-changed-f31eeedc-851d-457f-8464-c8562fdeaf87 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1117.820065] env[63538]: DEBUG nova.compute.manager [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Refreshing instance network info cache due to event network-changed-f31eeedc-851d-457f-8464-c8562fdeaf87. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1117.820277] env[63538]: DEBUG oslo_concurrency.lockutils [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] Acquiring lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1117.844723] env[63538]: DEBUG nova.network.neutron [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance_info_cache with network_info: [{"id": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "address": "fa:16:3e:90:72:79", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bf5c751-02", "ovs_interfaceid": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.865586] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101712, 'name': RelocateVM_Task} progress is 63%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.875560] env[63538]: DEBUG oslo_vmware.api [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101713, 'name': PowerOnVM_Task, 'duration_secs': 0.781945} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.875884] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1117.876132] env[63538]: INFO nova.compute.manager [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Took 8.60 seconds to spawn the instance on the hypervisor. 
[ 1117.876327] env[63538]: DEBUG nova.compute.manager [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1117.877244] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b40241-fbcc-42c6-9063-68b79684ac5e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.940586] env[63538]: DEBUG nova.objects.base [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Object Instance<049518bd-d569-491a-8f79-6f0b78cf44b2> lazy-loaded attributes: info_cache,migration_context {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1117.941277] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd150e59-09c4-4c59-9f12-c35eb0ea6845 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.965570] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ca4b3e5-9857-489a-83d6-80a1912a3d5a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.974554] env[63538]: DEBUG oslo_vmware.api [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1117.974554] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5246f960-f200-5a70-86fb-a60d12378c0d" [ 1117.974554] env[63538]: _type = "Task" [ 1117.974554] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.989952] env[63538]: DEBUG oslo_vmware.api [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5246f960-f200-5a70-86fb-a60d12378c0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.084974] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.875s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.178034] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: b47925eb-3d97-415b-9410-2e325da5ce79] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1118.225639] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101714, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565116} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.229632] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 12beddad-1f19-4cee-b885-3079e3603ba3/12beddad-1f19-4cee-b885-3079e3603ba3.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1118.229632] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1118.230058] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a4fef22-b18f-4285-875b-891cfa33d364 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.247867] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1118.247867] env[63538]: value = "task-5101715" [ 1118.247867] env[63538]: _type = "Task" [ 1118.247867] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.263971] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101715, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.265734] env[63538]: INFO nova.scheduler.client.report [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Deleted allocations for instance 6257bf5c-8a1c-4204-9605-cc07491e14ea [ 1118.347804] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1118.364644] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101712, 'name': RelocateVM_Task} progress is 76%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.404933] env[63538]: INFO nova.compute.manager [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Took 17.33 seconds to build instance. 
[ 1118.465601] env[63538]: DEBUG nova.network.neutron [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updated VIF entry in instance network info cache for port f31eeedc-851d-457f-8464-c8562fdeaf87. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1118.465985] env[63538]: DEBUG nova.network.neutron [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updating instance_info_cache with network_info: [{"id": "f31eeedc-851d-457f-8464-c8562fdeaf87", "address": "fa:16:3e:8d:d6:64", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf31eeedc-85", "ovs_interfaceid": "f31eeedc-851d-457f-8464-c8562fdeaf87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.489779] env[63538]: DEBUG oslo_vmware.api [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5246f960-f200-5a70-86fb-a60d12378c0d, 'name': SearchDatastore_Task, 'duration_secs': 0.010901} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.490497] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.490804] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.680782] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e447c109-4cef-4cc7-9acf-61abc0f47482] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1118.761326] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101715, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094196} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.761642] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1118.764515] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a7e139-a148-4fbf-9065-85d701fcde22 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.791946] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 12beddad-1f19-4cee-b885-3079e3603ba3/12beddad-1f19-4cee-b885-3079e3603ba3.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1118.792708] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f4c95ebe-c7f4-42f4-ac18-cf99e51bd292 tempest-ServersTestJSON-1673399815 tempest-ServersTestJSON-1673399815-project-member] Lock "6257bf5c-8a1c-4204-9605-cc07491e14ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.221s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.794129] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9291049a-705e-4b47-aa73-ace14c0536e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.824290] env[63538]: DEBUG oslo_vmware.api 
[None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1118.824290] env[63538]: value = "task-5101716" [ 1118.824290] env[63538]: _type = "Task" [ 1118.824290] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.837047] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101716, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.869594] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101712, 'name': RelocateVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.908525] env[63538]: DEBUG oslo_concurrency.lockutils [None req-54638683-67fc-46bb-af88-1d9d29e42f67 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.853s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.973872] env[63538]: DEBUG oslo_concurrency.lockutils [req-58559173-a3be-4c1f-855a-124bd1e4dfc5 req-05dec0fa-e7fd-4a15-9302-6ff8faf02815 service nova] Releasing lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1118.974385] env[63538]: DEBUG oslo_concurrency.lockutils [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] Acquired lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.974600] env[63538]: DEBUG nova.network.neutron [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Refreshing network info cache for port f31eeedc-851d-457f-8464-c8562fdeaf87 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1119.186399] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: f1838794-710c-4bea-9e73-f6912e1b69f5] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1119.241178] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3140af75-8ad8-47ef-b18d-b5bd2b57df02 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.250807] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d8447d-8abf-4b75-9380-c6613e6d8659 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.285691] env[63538]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e87ffda-3eb5-4caf-83de-40d78fa2caf4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.294789] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd65e7f-cb3a-477c-9bd2-5621d48141a7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.310311] env[63538]: DEBUG nova.compute.provider_tree [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.335372] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101716, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.369032] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101712, 'name': RelocateVM_Task} progress is 97%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.448267] env[63538]: DEBUG nova.compute.manager [req-0c904c5c-e884-4bfa-93a1-ed0372ce8d8f req-6d8f6e6b-94bf-4e19-a249-858490a60926 service nova] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Received event network-changed-c8b86754-970c-4f8a-b3fb-ec8fb42d3863 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1119.448267] env[63538]: DEBUG nova.compute.manager [req-0c904c5c-e884-4bfa-93a1-ed0372ce8d8f req-6d8f6e6b-94bf-4e19-a249-858490a60926 service nova] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Refreshing instance network info cache due to event network-changed-c8b86754-970c-4f8a-b3fb-ec8fb42d3863. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1119.448469] env[63538]: DEBUG oslo_concurrency.lockutils [req-0c904c5c-e884-4bfa-93a1-ed0372ce8d8f req-6d8f6e6b-94bf-4e19-a249-858490a60926 service nova] Acquiring lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1119.448668] env[63538]: DEBUG oslo_concurrency.lockutils [req-0c904c5c-e884-4bfa-93a1-ed0372ce8d8f req-6d8f6e6b-94bf-4e19-a249-858490a60926 service nova] Acquired lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.448866] env[63538]: DEBUG nova.network.neutron [req-0c904c5c-e884-4bfa-93a1-ed0372ce8d8f req-6d8f6e6b-94bf-4e19-a249-858490a60926 service nova] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Refreshing network info cache for port c8b86754-970c-4f8a-b3fb-ec8fb42d3863 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1119.691272] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: edcc5700-7b1e-494a-82d1-844373a9d5a6] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1119.813652] env[63538]: DEBUG nova.scheduler.client.report [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1119.836040] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101716, 'name': ReconfigVM_Task, 'duration_secs': 0.650487} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.838111] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 12beddad-1f19-4cee-b885-3079e3603ba3/12beddad-1f19-4cee-b885-3079e3603ba3.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1119.838111] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-80870339-5d9b-46c8-83d8-08355974e490 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.845745] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1119.845745] env[63538]: value = "task-5101717" [ 1119.845745] env[63538]: _type = "Task" [ 1119.845745] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.862734] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101717, 'name': Rename_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.866260] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101712, 'name': RelocateVM_Task} progress is 98%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.873779] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f494cc-0af0-4351-b577-84f3efb8a2bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.894564] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance '3d80dc17-e330-4575-8e12-e06d8e76274a' progress to 0 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1120.041041] env[63538]: DEBUG nova.network.neutron [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updated VIF entry in instance network info cache for port f31eeedc-851d-457f-8464-c8562fdeaf87. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1120.041041] env[63538]: DEBUG nova.network.neutron [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updating instance_info_cache with network_info: [{"id": "f31eeedc-851d-457f-8464-c8562fdeaf87", "address": "fa:16:3e:8d:d6:64", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf31eeedc-85", "ovs_interfaceid": "f31eeedc-851d-457f-8464-c8562fdeaf87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.056013] env[63538]: DEBUG nova.compute.manager [req-c80abff1-2400-407b-b61f-768abd855a1c req-ca971eca-c5db-462e-aeaa-bf944ba0ecbb service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Received event network-changed-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1120.056289] env[63538]: DEBUG nova.compute.manager [req-c80abff1-2400-407b-b61f-768abd855a1c req-ca971eca-c5db-462e-aeaa-bf944ba0ecbb service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Refreshing instance network info cache due to event network-changed-319cec1c-49eb-43a4-a9ec-6b74a507b6d6. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1120.056515] env[63538]: DEBUG oslo_concurrency.lockutils [req-c80abff1-2400-407b-b61f-768abd855a1c req-ca971eca-c5db-462e-aeaa-bf944ba0ecbb service nova] Acquiring lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1120.056686] env[63538]: DEBUG oslo_concurrency.lockutils [req-c80abff1-2400-407b-b61f-768abd855a1c req-ca971eca-c5db-462e-aeaa-bf944ba0ecbb service nova] Acquired lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.056859] env[63538]: DEBUG nova.network.neutron [req-c80abff1-2400-407b-b61f-768abd855a1c req-ca971eca-c5db-462e-aeaa-bf944ba0ecbb service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Refreshing network info cache for port 319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1120.196463] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 0df15328-aebd-44c5-9c78-ee05f188ad95] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1120.361182] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101717, 'name': Rename_Task, 'duration_secs': 0.165991} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.361917] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1120.362050] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99973bbd-1ce5-46f2-b251-33a4074808f8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.366805] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101712, 'name': RelocateVM_Task, 'duration_secs': 3.612257} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.371066] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Volume attach. 
Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1120.371066] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992508', 'volume_id': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'name': 'volume-f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'aaf52cad-86fd-42df-8ee3-13724e3f5e94', 'attached_at': '', 'detached_at': '', 'volume_id': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'serial': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1120.371533] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b87f40-af89-4b40-a004-5512ac2719c3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.374573] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1120.374573] env[63538]: value = "task-5101718" [ 1120.374573] env[63538]: _type = "Task" [ 1120.374573] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.393830] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a71f4df-6cc7-4ac8-8be1-bb384f32e01f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.402335] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1120.402666] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101718, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.403368] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9816ddff-a7ef-4f39-8af2-684e504d32ec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.425330] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] volume-f66d7fd1-7450-49dd-aedf-14fdf30f2e90/volume-f66d7fd1-7450-49dd-aedf-14fdf30f2e90.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1120.426290] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cce5f0f-7ba8-409f-8005-0091fd631163 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.444512] env[63538]: DEBUG oslo_vmware.api [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1120.444512] env[63538]: value = "task-5101719" [ 1120.444512] env[63538]: _type = "Task" [ 1120.444512] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.451672] env[63538]: DEBUG nova.network.neutron [req-0c904c5c-e884-4bfa-93a1-ed0372ce8d8f req-6d8f6e6b-94bf-4e19-a249-858490a60926 service nova] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updated VIF entry in instance network info cache for port c8b86754-970c-4f8a-b3fb-ec8fb42d3863. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1120.451672] env[63538]: DEBUG nova.network.neutron [req-0c904c5c-e884-4bfa-93a1-ed0372ce8d8f req-6d8f6e6b-94bf-4e19-a249-858490a60926 service nova] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance_info_cache with network_info: [{"id": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "address": "fa:16:3e:e0:99:90", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b86754-97", "ovs_interfaceid": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.455054] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Waiting for the task: (returnval){ [ 1120.455054] env[63538]: value = "task-5101720" [ 1120.455054] env[63538]: _type = "Task" [ 1120.455054] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.461234] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1120.463543] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance '3d80dc17-e330-4575-8e12-e06d8e76274a' progress to 17 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1120.468501] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101720, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.543073] env[63538]: DEBUG oslo_concurrency.lockutils [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] Releasing lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1120.543434] env[63538]: DEBUG nova.compute.manager [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Received event network-changed-dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1120.543660] env[63538]: DEBUG nova.compute.manager [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Refreshing instance network info cache due to event network-changed-dfaa4640-ae2a-444b-aa92-e24dd9eca692. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1120.543869] env[63538]: DEBUG oslo_concurrency.lockutils [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] Acquiring lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1120.544027] env[63538]: DEBUG oslo_concurrency.lockutils [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] Acquired lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.544219] env[63538]: DEBUG nova.network.neutron [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Refreshing network info cache for port dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1120.700423] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: fa8ed101-914d-4751-ab9b-f68ad5da7a56] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1120.832459] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.339s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.889211] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101718, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.957660] env[63538]: DEBUG oslo_concurrency.lockutils [req-0c904c5c-e884-4bfa-93a1-ed0372ce8d8f req-6d8f6e6b-94bf-4e19-a249-858490a60926 service nova] Releasing lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1120.973795] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1120.974269] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1120.978014] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1120.978014] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1120.978014] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1120.978014] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1120.978014] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1120.978014] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 
tempest-ServerActionsTestOtherB-1578235278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1120.978014] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1120.978014] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1120.978014] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1120.985360] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101720, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.985988] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a884d464-00af-4754-b43f-7ab60a7f61bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.008982] env[63538]: DEBUG oslo_vmware.api [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1121.008982] env[63538]: value = "task-5101721" [ 1121.008982] env[63538]: _type = "Task" [ 1121.008982] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.025814] env[63538]: DEBUG oslo_vmware.api [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101721, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.203913] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 1db1d558-2473-49cb-b309-f7192bd6b9c1] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1121.296172] env[63538]: DEBUG nova.network.neutron [req-c80abff1-2400-407b-b61f-768abd855a1c req-ca971eca-c5db-462e-aeaa-bf944ba0ecbb service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Updated VIF entry in instance network info cache for port 319cec1c-49eb-43a4-a9ec-6b74a507b6d6. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1121.296172] env[63538]: DEBUG nova.network.neutron [req-c80abff1-2400-407b-b61f-768abd855a1c req-ca971eca-c5db-462e-aeaa-bf944ba0ecbb service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Updating instance_info_cache with network_info: [{"id": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "address": "fa:16:3e:ce:c8:c1", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap319cec1c-49", "ovs_interfaceid": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.387830] env[63538]: DEBUG oslo_vmware.api [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101718, 'name': PowerOnVM_Task, 'duration_secs': 0.726794} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.389863] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1121.390166] env[63538]: INFO nova.compute.manager [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Took 7.43 seconds to spawn the instance on the hypervisor. 
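[editor's note] The recurring "Waiting for the task: ... to complete" / "progress is N%" / "completed successfully ... duration_secs" triplets above are produced by oslo.vmware's task-polling loop (wait_for_task / _poll_task). A minimal schematic of that pattern is sketched below; it is not the oslo.vmware implementation, and get_task_info plus the 0.5 s interval are assumptions for illustration only.

    import time

    POLL_INTERVAL = 0.5  # assumed; the real poll interval is configurable


    def wait_for_task(get_task_info, task_id):
        """Poll a vCenter task until it reaches a terminal state.

        Mirrors the log pattern: 'Waiting for the task ... to complete'
        -> 'progress is N%.' -> 'completed successfully' with a duration.

        get_task_info is a hypothetical callable returning a dict such as
        {'state': 'running', 'progress': 14} or {'state': 'success'}.
        """
        start = time.monotonic()
        print(f"Waiting for the task: {task_id} to complete.")
        while True:
            info = get_task_info(task_id)
            if info["state"] == "success":
                duration = time.monotonic() - start
                print(f"Task {task_id} completed successfully "
                      f"(duration_secs {duration:.6f})")
                return info
            if info["state"] == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
            print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
            time.sleep(POLL_INTERVAL)

[end editor's note]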
[ 1121.391051] env[63538]: DEBUG nova.compute.manager [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1121.392557] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e8406a-3347-47a5-9620-87e8d677c707 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.404658] env[63538]: INFO nova.scheduler.client.report [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleted allocation for migration 8a556cae-5667-4483-8ca3-57542b6380a6 [ 1121.469858] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101720, 'name': ReconfigVM_Task, 'duration_secs': 0.608482} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.471205] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Reconfigured VM instance instance-0000006c to attach disk [datastore1] volume-f66d7fd1-7450-49dd-aedf-14fdf30f2e90/volume-f66d7fd1-7450-49dd-aedf-14fdf30f2e90.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.476572] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b56b9e3-642a-4cfa-9fed-fd74d330a272 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.497541] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Waiting for the task: (returnval){ [ 1121.497541] env[63538]: value = "task-5101722" [ 1121.497541] env[63538]: _type = "Task" [ 1121.497541] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.507674] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101722, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.520055] env[63538]: DEBUG oslo_vmware.api [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101721, 'name': ReconfigVM_Task, 'duration_secs': 0.189676} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.520489] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance '3d80dc17-e330-4575-8e12-e06d8e76274a' progress to 33 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1121.632263] env[63538]: DEBUG nova.network.neutron [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updated VIF entry in instance network info cache for port dfaa4640-ae2a-444b-aa92-e24dd9eca692. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1121.632618] env[63538]: DEBUG nova.network.neutron [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updating instance_info_cache with network_info: [{"id": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "address": "fa:16:3e:f0:24:40", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfaa4640-ae", "ovs_interfaceid": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.707996] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: b0b4ae9c-95d3-47a1-86a7-120c88b60704] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1121.801789] env[63538]: DEBUG oslo_concurrency.lockutils [req-c80abff1-2400-407b-b61f-768abd855a1c req-ca971eca-c5db-462e-aeaa-bf944ba0ecbb service nova] Releasing lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1121.913878] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "049518bd-d569-491a-8f79-6f0b78cf44b2" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.193s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.917146] env[63538]: INFO nova.compute.manager [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Took 16.09 seconds to build instance. [ 1122.007730] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101722, 'name': ReconfigVM_Task, 'duration_secs': 0.457057} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.008053] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992508', 'volume_id': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'name': 'volume-f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'aaf52cad-86fd-42df-8ee3-13724e3f5e94', 'attached_at': '', 'detached_at': '', 'volume_id': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'serial': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1122.008619] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-586adc04-83f7-4953-ab1e-db79668a219d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.015223] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Waiting for the task: (returnval){ [ 1122.015223] env[63538]: value = "task-5101723" [ 1122.015223] env[63538]: _type = "Task" [ 1122.015223] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.023673] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101723, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.028013] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1122.028286] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1122.028460] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1122.028963] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1122.028963] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1122.029110] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1122.029604] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1122.029604] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1122.029723] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 
tempest-ServerActionsTestOtherB-1578235278-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1122.029905] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1122.030170] env[63538]: DEBUG nova.virt.hardware [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1122.036355] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Reconfiguring VM instance instance-00000059 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1122.036899] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa848f90-8285-4979-8513-092d097b2fea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.058089] env[63538]: DEBUG oslo_vmware.api [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1122.058089] env[63538]: value = "task-5101724" [ 1122.058089] env[63538]: _type = "Task" [ 1122.058089] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.072468] env[63538]: DEBUG oslo_vmware.api [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101724, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.135740] env[63538]: DEBUG oslo_concurrency.lockutils [req-c69f3c4d-52dd-40e8-937b-5a9980ccd382 req-043b0c94-5f6e-480e-87fc-e83fa62bf8a4 service nova] Releasing lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1122.211618] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: edc670dd-732a-4c54-924c-c99ee539d4d9] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1122.420261] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d16042df-1908-4496-9955-a3510945712f tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Lock "12beddad-1f19-4cee-b885-3079e3603ba3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.600s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.530048] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101723, 'name': Rename_Task, 'duration_secs': 0.164498} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.530362] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1122.530862] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c20a6ca9-2044-43ae-bc33-52b861504554 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.539632] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Waiting for the task: (returnval){ [ 1122.539632] env[63538]: value = "task-5101725" [ 1122.539632] env[63538]: _type = "Task" [ 1122.539632] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.558625] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101725, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.568456] env[63538]: DEBUG oslo_vmware.api [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101724, 'name': ReconfigVM_Task, 'duration_secs': 0.182347} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.568770] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Reconfigured VM instance instance-00000059 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1122.569619] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51bdf4ea-4197-4e07-ba8d-019842f2bc11 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.610430] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 3d80dc17-e330-4575-8e12-e06d8e76274a/3d80dc17-e330-4575-8e12-e06d8e76274a.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1122.611135] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd7c4f8d-36d2-4bdb-88fe-88f1117ef06a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.643149] env[63538]: DEBUG oslo_vmware.api [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1122.643149] env[63538]: value = "task-5101726" [ 1122.643149] env[63538]: _type = "Task" [ 1122.643149] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.653524] env[63538]: DEBUG oslo_vmware.api [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101726, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.715341] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e79a9eeb-a4c4-4613-bc43-4e40103addf9] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1122.774118] env[63538]: INFO nova.compute.manager [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Rebuilding instance [ 1122.837018] env[63538]: DEBUG nova.compute.manager [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1122.837018] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fba3c8b-bfe6-43b0-9747-d004f3ad5266 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.051930] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101725, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.154997] env[63538]: DEBUG oslo_vmware.api [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101726, 'name': ReconfigVM_Task, 'duration_secs': 0.301233} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.155339] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Reconfigured VM instance instance-00000059 to attach disk [datastore2] 3d80dc17-e330-4575-8e12-e06d8e76274a/3d80dc17-e330-4575-8e12-e06d8e76274a.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1123.155630] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance '3d80dc17-e330-4575-8e12-e06d8e76274a' progress to 50 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1123.219402] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 5ed2a02c-816e-45b6-bd33-eed5f8ab0d7a] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1123.263613] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "049518bd-d569-491a-8f79-6f0b78cf44b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.263900] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "049518bd-d569-491a-8f79-6f0b78cf44b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.264154] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "049518bd-d569-491a-8f79-6f0b78cf44b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.264620] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "049518bd-d569-491a-8f79-6f0b78cf44b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.264857] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "049518bd-d569-491a-8f79-6f0b78cf44b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.267084] env[63538]: INFO nova.compute.manager [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Terminating instance [ 1123.268968] env[63538]: DEBUG nova.compute.manager [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1123.269189] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1123.270040] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4681878-9e7c-4a93-a4ef-ce2be92b8e92 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.278552] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1123.278829] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04b08527-e796-42c5-b047-f43cf2c8e083 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.285255] env[63538]: DEBUG oslo_vmware.api [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1123.285255] env[63538]: value = "task-5101727" [ 1123.285255] env[63538]: _type = "Task" [ 1123.285255] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.295828] env[63538]: DEBUG oslo_vmware.api [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101727, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.336960] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Acquiring lock "4e07fbfb-cae0-440d-8f75-c76cce3f7d00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.337219] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Lock "4e07fbfb-cae0-440d-8f75-c76cce3f7d00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.351163] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1123.352271] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f86c1ce-be43-4dd5-a4f6-6697b3d0ebe7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.360608] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1123.360608] env[63538]: value = "task-5101728" [ 1123.360608] env[63538]: _type = "Task" [ 1123.360608] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.372203] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101728, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.550807] env[63538]: DEBUG oslo_vmware.api [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101725, 'name': PowerOnVM_Task, 'duration_secs': 0.725985} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.551140] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1123.551388] env[63538]: INFO nova.compute.manager [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Took 10.08 seconds to spawn the instance on the hypervisor. [ 1123.551636] env[63538]: DEBUG nova.compute.manager [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1123.552481] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8761420f-bf26-4c83-8333-7b553d6e01c4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.662978] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb27572-c902-471d-83e9-0aa31c2abf27 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.683791] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d8a47b-90a2-4116-bb8e-cf9bf0612dc8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.704864] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance '3d80dc17-e330-4575-8e12-e06d8e76274a' progress to 67 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1123.723305] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 376ee3d9-e8b5-4f47-9622-b873126b492e] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1123.796220] env[63538]: DEBUG oslo_vmware.api [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101727, 'name': PowerOffVM_Task, 'duration_secs': 0.269969} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.796628] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1123.796836] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1123.797123] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9b5e30e-7f4a-486b-88af-ece66906b858 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.840199] env[63538]: DEBUG nova.compute.manager [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1123.870890] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101728, 'name': PowerOffVM_Task, 'duration_secs': 0.195111} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.871711] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1123.871711] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1123.872566] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2c07e4-e75b-470b-a846-80fe6d7092a7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.882308] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1123.882712] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0284dfd-4698-445f-927b-09124e86c79c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.886171] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 
tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1123.886401] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1123.886686] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleting the datastore file [datastore1] 049518bd-d569-491a-8f79-6f0b78cf44b2 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1123.886940] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25df65c3-a909-42b6-9432-e5694646a9e3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.896205] env[63538]: DEBUG oslo_vmware.api [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for the task: (returnval){ [ 1123.896205] env[63538]: value = "task-5101731" [ 1123.896205] env[63538]: _type = "Task" [ 1123.896205] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.907491] env[63538]: DEBUG oslo_vmware.api [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101731, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.917022] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1123.917022] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1123.917022] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Deleting the datastore file [datastore1] 12beddad-1f19-4cee-b885-3079e3603ba3 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1123.917022] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-721f5096-15ae-4803-ac72-bd50b9ca72b2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.924430] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1123.924430] env[63538]: value = "task-5101732" [ 1123.924430] env[63538]: _type = "Task" [ 1123.924430] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.933609] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101732, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.072045] env[63538]: INFO nova.compute.manager [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Took 22.49 seconds to build instance. 
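[editor's note] The "Acquiring lock ... / Lock ... acquired :: waited N.NNNs / released :: held N.NNNs" lines throughout this run come from oslo.concurrency's lockutils wrappers around critical sections such as the per-instance refresh_cache-<uuid> locks and the resource tracker's compute_resources lock. The sketch below shows that usage pattern only; the lock names, timings, and function bodies are illustrative, not taken from this run.

    import time

    from oslo_concurrency import lockutils


    def refresh_network_cache(instance_uuid, refresh_fn):
        """Serialize per-instance cache refreshes, like the
        "refresh_cache-<uuid>" locks seen in the log above."""
        lock_name = f"refresh_cache-{instance_uuid}"
        start = time.monotonic()
        # lockutils.lock() is a context manager; it blocks until the named
        # lock is free, which is what the 'waited N.NNNs' figures measure.
        with lockutils.lock(lock_name):
            waited = time.monotonic() - start
            print(f'Lock "{lock_name}" acquired :: waited {waited:.3f}s')
            refresh_fn(instance_uuid)
        print(f'Lock "{lock_name}" released')


    @lockutils.synchronized("compute_resources")
    def claim_resources(request):
        """Decorator form: every call serializes on the shared
        "compute_resources" lock, as the resource tracker does."""
        # placeholder body for the sketch
        return {"claimed": True, "request": request}

[end editor's note]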
[ 1124.226085] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 9c1f7da8-59f6-45bc-8d5f-23c8ec760829] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1124.275843] env[63538]: DEBUG nova.network.neutron [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Port 2bf5c751-02ce-4e9e-8e98-68c3505b8aec binding to destination host cpu-1 is already ACTIVE {{(pid=63538) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1124.370469] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.370745] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.373054] env[63538]: INFO nova.compute.claims [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1124.411121] env[63538]: DEBUG oslo_vmware.api [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Task: {'id': task-5101731, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183335} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.411594] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1124.411830] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1124.412131] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1124.412411] env[63538]: INFO nova.compute.manager [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1124.412788] env[63538]: DEBUG oslo.service.loopingcall [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1124.413112] env[63538]: DEBUG nova.compute.manager [-] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1124.413317] env[63538]: DEBUG nova.network.neutron [-] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1124.438016] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101732, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099626} completed successfully. 
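The "Waiting for function ... _deallocate_network_with_retries to return" line is oslo.service's looping-call machinery retrying the Neutron teardown. The sketch below shows the general pattern with FixedIntervalLoopingCall; the looping-call variant and retry policy Nova actually uses are not visible in the log, and the transient failures here are simulated.

    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _deallocate_with_retries():
        attempts['count'] += 1
        if attempts['count'] < 3:      # simulate two transient Neutron failures
            return                     # returning normally keeps the loop running
        raise loopingcall.LoopingCallDone(True)   # success stops the loop

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    # start() returns an event; wait() blocks the caller, which is what the
    # "Waiting for function ... to return" DEBUG line reports.
    result = timer.start(interval=0.1).wait()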
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.438429] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1124.438733] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1124.439054] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1124.577601] env[63538]: DEBUG oslo_concurrency.lockutils [None req-babea9b5-a0cb-4530-a528-73e64f2c053f tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Lock "aaf52cad-86fd-42df-8ee3-13724e3f5e94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.007s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.734272] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: ade3cce6-5662-4199-96f4-398436f840d8] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1124.833073] env[63538]: DEBUG nova.compute.manager [req-34577d09-c854-44e7-b1c1-7a3b6e7f2a6c req-fe35ecc6-d83f-40c5-8637-c6588b60e315 service nova] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Received event network-vif-deleted-da39877d-c305-4a70-8310-b2ad992f0cc7 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1124.833212] env[63538]: INFO nova.compute.manager [req-34577d09-c854-44e7-b1c1-7a3b6e7f2a6c req-fe35ecc6-d83f-40c5-8637-c6588b60e315 service nova] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Neutron deleted interface da39877d-c305-4a70-8310-b2ad992f0cc7; detaching it from the instance and deleting it from the info cache [ 1124.833530] env[63538]: DEBUG nova.network.neutron [req-34577d09-c854-44e7-b1c1-7a3b6e7f2a6c req-fe35ecc6-d83f-40c5-8637-c6588b60e315 service nova] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.239391] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 0e718984-cfce-4620-9be6-fdcfb4954da8] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1125.303031] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "3d80dc17-e330-4575-8e12-e06d8e76274a-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.303208] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.303308] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.304433] env[63538]: DEBUG nova.network.neutron [-] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.336474] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-436a5ea9-ebf7-4e92-b955-d3e28f8cf639 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.348816] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd7e9d8-4a6b-43e5-8f82-c83e2f96e831 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.387972] env[63538]: DEBUG nova.compute.manager [req-34577d09-c854-44e7-b1c1-7a3b6e7f2a6c req-fe35ecc6-d83f-40c5-8637-c6588b60e315 service nova] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Detach interface failed, port_id=da39877d-c305-4a70-8310-b2ad992f0cc7, reason: Instance 049518bd-d569-491a-8f79-6f0b78cf44b2 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1125.478802] env[63538]: DEBUG nova.virt.hardware [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1125.479075] env[63538]: DEBUG nova.virt.hardware [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1125.479245] env[63538]: DEBUG nova.virt.hardware [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1125.479518] env[63538]: DEBUG nova.virt.hardware [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1125.479683] env[63538]: DEBUG nova.virt.hardware [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1125.479835] env[63538]: DEBUG nova.virt.hardware [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1125.480059] env[63538]: DEBUG nova.virt.hardware [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1125.480229] env[63538]: DEBUG nova.virt.hardware [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1125.480403] env[63538]: DEBUG nova.virt.hardware [None req-83f0d77c-7956-4f37-94ea-9f1378db215b 
tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1125.480570] env[63538]: DEBUG nova.virt.hardware [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1125.480747] env[63538]: DEBUG nova.virt.hardware [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1125.481635] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890991ef-d46b-4813-a352-765d2e552450 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.493941] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9416449-f5da-4e26-a7df-5a10ac713b79 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.520013] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1125.530403] env[63538]: DEBUG oslo.service.loopingcall [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1125.534747] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1125.535402] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-421d4db0-1576-4cfd-b553-1afd5b23ca97 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.568602] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1125.568602] env[63538]: value = "task-5101733" [ 1125.568602] env[63538]: _type = "Task" [ 1125.568602] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.585767] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101733, 'name': CreateVM_Task} progress is 5%. 
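The nova.virt.hardware lines above reduce the m1.nano flavor (1 vCPU, no explicit limits, so the maxima default to 65536) to a single possible topology, 1 socket x 1 core x 1 thread. An illustrative re-implementation of that enumeration (not Nova's code) shows why only one triple survives:

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """All (sockets, cores, threads) triples whose product equals vcpus."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1, 65536, 65536, 65536))  # [(1, 1, 1)]
    print(possible_topologies(4, 65536, 65536, 65536))  # six factorizations of 4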
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.603626] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46afd55d-ce86-4b95-b08e-27c96c7cd813 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.612380] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075c06a3-e31c-4af6-8720-ea7f51b09a90 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.654361] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da238e9-ad7f-4e29-8c91-f06cc656b2d5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.663806] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825d4b7c-561f-43d0-bd24-3602ed90ce02 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.679230] env[63538]: DEBUG nova.compute.provider_tree [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1125.742674] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: d967631f-5c8a-42d8-ac05-4cec3bdb55cf] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1125.809931] env[63538]: INFO nova.compute.manager [-] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Took 1.40 seconds to deallocate network for instance. [ 1126.079895] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101733, 'name': CreateVM_Task, 'duration_secs': 0.460135} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.080162] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1126.080625] env[63538]: DEBUG oslo_concurrency.lockutils [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.080806] env[63538]: DEBUG oslo_concurrency.lockutils [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.081207] env[63538]: DEBUG oslo_concurrency.lockutils [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1126.081513] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9de603f8-0ec5-4e79-b833-dcf8c8fcb524 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.088054] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1126.088054] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b7eea6-a842-c6b4-b678-763f34e594bd" [ 1126.088054] env[63538]: _type = "Task" [ 1126.088054] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.098593] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b7eea6-a842-c6b4-b678-763f34e594bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.184506] env[63538]: DEBUG nova.scheduler.client.report [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1126.247375] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 49cc0ba8-2f7e-4bb8-a1e7-de9a695b1cd1] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1126.319399] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.353795] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.354051] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.354247] env[63538]: DEBUG nova.network.neutron [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1126.598536] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b7eea6-a842-c6b4-b678-763f34e594bd, 'name': SearchDatastore_Task, 'duration_secs': 0.015563} completed successfully. 
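The inventory record reported for provider f65218a4-1d3d-476a-9093-01cae92c8635 translates into allocatable capacity with placement's usual formula, capacity = (total - reserved) * allocation_ratio. A quick check against the numbers in the log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, int(capacity))
    # VCPU 192, MEMORY_MB 196078, DISK_GB 200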
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.598869] env[63538]: DEBUG oslo_concurrency.lockutils [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1126.599226] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1126.599383] env[63538]: DEBUG oslo_concurrency.lockutils [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.599537] env[63538]: DEBUG oslo_concurrency.lockutils [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.599723] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1126.599997] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b1423e0-1375-4f61-ac39-6c45326fce26 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.610971] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1126.611212] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1126.612135] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4893233-fc12-470b-bc08-0bb15b5340cb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.619059] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1126.619059] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52078ccc-90f6-b169-1d2d-d33569acfe67" [ 1126.619059] env[63538]: _type = "Task" [ 1126.619059] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.630095] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52078ccc-90f6-b169-1d2d-d33569acfe67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.690288] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.319s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.690772] env[63538]: DEBUG nova.compute.manager [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1126.693369] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.374s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.693575] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.715382] env[63538]: INFO nova.scheduler.client.report [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Deleted allocations for instance 049518bd-d569-491a-8f79-6f0b78cf44b2 [ 1126.750758] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: de68a921-bf67-4794-923d-4e062d8ff802] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1126.862914] env[63538]: DEBUG nova.compute.manager [req-dd35ae92-8013-4dd6-b8ab-4e03f4d20a45 req-2c4e7363-ee5e-481e-bf33-acd5f80bf787 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Received event network-changed-3e8852b6-74d6-48df-920f-ee0169a7772e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1126.863164] env[63538]: DEBUG nova.compute.manager [req-dd35ae92-8013-4dd6-b8ab-4e03f4d20a45 req-2c4e7363-ee5e-481e-bf33-acd5f80bf787 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Refreshing instance network info cache due to event network-changed-3e8852b6-74d6-48df-920f-ee0169a7772e. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1126.863385] env[63538]: DEBUG oslo_concurrency.lockutils [req-dd35ae92-8013-4dd6-b8ab-4e03f4d20a45 req-2c4e7363-ee5e-481e-bf33-acd5f80bf787 service nova] Acquiring lock "refresh_cache-aaf52cad-86fd-42df-8ee3-13724e3f5e94" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.863534] env[63538]: DEBUG oslo_concurrency.lockutils [req-dd35ae92-8013-4dd6-b8ab-4e03f4d20a45 req-2c4e7363-ee5e-481e-bf33-acd5f80bf787 service nova] Acquired lock "refresh_cache-aaf52cad-86fd-42df-8ee3-13724e3f5e94" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.863701] env[63538]: DEBUG nova.network.neutron [req-dd35ae92-8013-4dd6-b8ab-4e03f4d20a45 req-2c4e7363-ee5e-481e-bf33-acd5f80bf787 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Refreshing network info cache for port 3e8852b6-74d6-48df-920f-ee0169a7772e {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1127.107158] env[63538]: DEBUG nova.network.neutron [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance_info_cache with network_info: [{"id": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "address": "fa:16:3e:90:72:79", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bf5c751-02", "ovs_interfaceid": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.132219] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52078ccc-90f6-b169-1d2d-d33569acfe67, 'name': SearchDatastore_Task, 'duration_secs': 0.023069} completed successfully. 
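The instance_info_cache update above carries the full network_info model for port 2bf5c751-02ce-4e9e-8e98-68c3505b8aec. A short walk over an abridged copy of that structure (values taken from the log) pulls out the MAC, the fixed IP and its floating IP:

    network_info = [{
        "id": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec",
        "address": "fa:16:3e:90:72:79",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.6", "type": "fixed",
                     "floating_ips": [{"address": "10.180.180.169",
                                       "type": "floating"}]}],
        }]},
    }]

    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floating = [f["address"] for f in ip.get("floating_ips", [])]
                print(vif["id"], vif["address"], ip["address"], floating)
    # 2bf5c751-02ce-4e9e-8e98-68c3505b8aec fa:16:3e:90:72:79 192.168.128.6 ['10.180.180.169']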
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.133101] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40bca78a-89f1-424a-bf14-c611bb55229a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.139604] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1127.139604] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521a52fa-119b-b6d2-7f3e-91c801f9d3f2" [ 1127.139604] env[63538]: _type = "Task" [ 1127.139604] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.149336] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521a52fa-119b-b6d2-7f3e-91c801f9d3f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.198062] env[63538]: DEBUG nova.compute.utils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1127.199596] env[63538]: DEBUG nova.compute.manager [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1127.199772] env[63538]: DEBUG nova.network.neutron [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1127.223486] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a48ed84a-289c-4ebb-8a4e-49915fe920bc tempest-DeleteServersTestJSON-312874781 tempest-DeleteServersTestJSON-312874781-project-member] Lock "049518bd-d569-491a-8f79-6f0b78cf44b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.959s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.244578] env[63538]: DEBUG nova.policy [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5634cb03ce174ea0b91883963b3ee90a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65bc1bc418144f9bbdef50c18595311c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1127.254083] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 87f8bb3e-6f32-4850-ac54-efad0befb268] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1127.612272] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1127.654256] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521a52fa-119b-b6d2-7f3e-91c801f9d3f2, 'name': SearchDatastore_Task, 'duration_secs': 0.011163} completed successfully. 
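The "Policy check for network:attach_external_network failed" line is expected for this tempest request: its credentials carry only the member and reader roles, while that action is normally admin-only. A hedged sketch of the same decision with oslo.policy, using assumed rule strings rather than Nova's registered defaults:

    from oslo_config import cfg
    from oslo_policy import policy

    CONF = cfg.ConfigOpts()
    CONF([], project='example')          # no config files needed for this sketch

    enforcer = policy.Enforcer(CONF)
    enforcer.register_defaults([
        policy.RuleDefault('context_is_admin', 'role:admin'),
        # Assumed rule string; Nova registers its own default for this action.
        policy.RuleDefault('network:attach_external_network',
                           'rule:context_is_admin'),
    ])

    creds = {'roles': ['member', 'reader'],
             'user_id': '5634cb03ce174ea0b91883963b3ee90a',
             'project_id': '65bc1bc418144f9bbdef50c18595311c'}
    print(enforcer.enforce('network:attach_external_network', {}, creds))  # False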
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.654256] env[63538]: DEBUG oslo_concurrency.lockutils [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1127.654256] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 12beddad-1f19-4cee-b885-3079e3603ba3/12beddad-1f19-4cee-b885-3079e3603ba3.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1127.654256] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f718209-5dbb-4f3d-8944-78c493604fdb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.664813] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1127.664813] env[63538]: value = "task-5101734" [ 1127.664813] env[63538]: _type = "Task" [ 1127.664813] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.677637] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101734, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.701534] env[63538]: DEBUG nova.network.neutron [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Successfully created port: fec44204-572f-48af-b2f0-8ba48080ec72 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1127.703729] env[63538]: DEBUG nova.compute.manager [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1127.746524] env[63538]: DEBUG nova.network.neutron [req-dd35ae92-8013-4dd6-b8ab-4e03f4d20a45 req-2c4e7363-ee5e-481e-bf33-acd5f80bf787 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Updated VIF entry in instance network info cache for port 3e8852b6-74d6-48df-920f-ee0169a7772e. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1127.747032] env[63538]: DEBUG nova.network.neutron [req-dd35ae92-8013-4dd6-b8ab-4e03f4d20a45 req-2c4e7363-ee5e-481e-bf33-acd5f80bf787 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Updating instance_info_cache with network_info: [{"id": "3e8852b6-74d6-48df-920f-ee0169a7772e", "address": "fa:16:3e:19:f8:87", "network": {"id": "8716f694-8f95-4130-a2ed-2d60a1849ebd", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-164096703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eaf2a7f857d342f1923d44141fa59cfe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e8852b6-74", "ovs_interfaceid": "3e8852b6-74d6-48df-920f-ee0169a7772e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.757151] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: d5d557c6-3d4e-4122-8756-218c9757fa01] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1128.148487] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf93a311-b578-42a4-a4ca-41c99f1bccba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.175508] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3957dc-be31-4bae-99d8-b53985984d20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.185308] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance '3d80dc17-e330-4575-8e12-e06d8e76274a' progress to 83 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1128.192502] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101734, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.251906] env[63538]: DEBUG oslo_concurrency.lockutils [req-dd35ae92-8013-4dd6-b8ab-4e03f4d20a45 req-2c4e7363-ee5e-481e-bf33-acd5f80bf787 service nova] Releasing lock "refresh_cache-aaf52cad-86fd-42df-8ee3-13724e3f5e94" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.261779] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: a2e036ae-318b-44ea-9db0-10fa3838728b] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1128.684571] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101734, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546087} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.685025] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 12beddad-1f19-4cee-b885-3079e3603ba3/12beddad-1f19-4cee-b885-3079e3603ba3.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1128.685300] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1128.685701] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92450469-9257-497f-a5c2-060ebc3aaaeb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.695644] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2074aa-2df3-40cb-b09e-e2f4c1bfd93d tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance '3d80dc17-e330-4575-8e12-e06d8e76274a' progress to 100 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1128.701962] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1128.701962] env[63538]: value = "task-5101736" [ 1128.701962] env[63538]: _type = "Task" [ 1128.701962] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.712256] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101736, 'name': ExtendVirtualDisk_Task} progress is 0%. 
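The "Extending root virtual disk to 1048576" figure above is in KB: the m1.nano flavor's root_gb=1 expressed as KiB. A small check of the arithmetic, followed (commented out) by the shape of the corresponding VirtualDiskManager call; the session, datacenter ref and datastore path are placeholders from the earlier sketch, not this deployment's values.

    root_gb = 1
    new_capacity_kb = root_gb * 1024 * 1024
    print(new_capacity_kb)  # 1048576, the value logged above

    # With an oslo.vmware session (see the earlier sketch), the extend step is a
    # task-returning call along these lines:
    # disk_mgr = session.vim.service_content.virtualDiskManager
    # task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
    #                           name='[datastore1] <uuid>/<uuid>.vmdk',
    #                           datacenter=dc_ref,
    #                           newCapacityKb=new_capacity_kb,
    #                           eagerZero=False)
    # session.wait_for_task(task)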
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.715698] env[63538]: DEBUG nova.compute.manager [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1128.747367] env[63538]: DEBUG nova.virt.hardware [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1128.747678] env[63538]: DEBUG nova.virt.hardware [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1128.747898] env[63538]: DEBUG nova.virt.hardware [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1128.748052] env[63538]: DEBUG nova.virt.hardware [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1128.748213] env[63538]: DEBUG nova.virt.hardware [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1128.748370] env[63538]: DEBUG nova.virt.hardware [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1128.748593] env[63538]: DEBUG nova.virt.hardware [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1128.748821] env[63538]: DEBUG nova.virt.hardware [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1128.749076] env[63538]: DEBUG nova.virt.hardware [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1128.749268] env[63538]: DEBUG nova.virt.hardware [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1128.749447] env[63538]: DEBUG nova.virt.hardware [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1128.750428] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04dfb352-9889-49a4-942c-d7fb224b3703 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.760549] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335b81ba-0385-4bd1-ae69-eacdc9bc89fc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.765739] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 466be7db-79e4-49fd-aa3b-56fbe5c60457] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1129.193597] env[63538]: DEBUG nova.compute.manager [req-5c9e1a64-543b-4c7b-aae1-5823f7d50215 req-26de2fce-52bb-4a84-9385-7d83ff7c03b3 service nova] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Received event network-vif-plugged-fec44204-572f-48af-b2f0-8ba48080ec72 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1129.193920] env[63538]: DEBUG oslo_concurrency.lockutils [req-5c9e1a64-543b-4c7b-aae1-5823f7d50215 req-26de2fce-52bb-4a84-9385-7d83ff7c03b3 service nova] Acquiring lock "4e07fbfb-cae0-440d-8f75-c76cce3f7d00-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.194298] env[63538]: DEBUG oslo_concurrency.lockutils [req-5c9e1a64-543b-4c7b-aae1-5823f7d50215 req-26de2fce-52bb-4a84-9385-7d83ff7c03b3 service nova] Lock "4e07fbfb-cae0-440d-8f75-c76cce3f7d00-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.194586] env[63538]: DEBUG oslo_concurrency.lockutils [req-5c9e1a64-543b-4c7b-aae1-5823f7d50215 req-26de2fce-52bb-4a84-9385-7d83ff7c03b3 service nova] Lock 
"4e07fbfb-cae0-440d-8f75-c76cce3f7d00-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.194869] env[63538]: DEBUG nova.compute.manager [req-5c9e1a64-543b-4c7b-aae1-5823f7d50215 req-26de2fce-52bb-4a84-9385-7d83ff7c03b3 service nova] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] No waiting events found dispatching network-vif-plugged-fec44204-572f-48af-b2f0-8ba48080ec72 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1129.195204] env[63538]: WARNING nova.compute.manager [req-5c9e1a64-543b-4c7b-aae1-5823f7d50215 req-26de2fce-52bb-4a84-9385-7d83ff7c03b3 service nova] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Received unexpected event network-vif-plugged-fec44204-572f-48af-b2f0-8ba48080ec72 for instance with vm_state building and task_state spawning. [ 1129.216722] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101736, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.239953} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.217630] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1129.218533] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcceacb0-9ac5-43eb-9d3b-acf7af4a5526 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.251944] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 12beddad-1f19-4cee-b885-3079e3603ba3/12beddad-1f19-4cee-b885-3079e3603ba3.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1129.252709] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d671f756-db09-4c3f-ac76-528db7715148 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.268615] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: bd222761-92aa-4f2c-a752-ead9c498ee7a] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1129.276707] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1129.276707] env[63538]: value = "task-5101737" [ 1129.276707] env[63538]: _type = "Task" [ 1129.276707] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.286890] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101737, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.764397] env[63538]: DEBUG nova.network.neutron [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Successfully updated port: fec44204-572f-48af-b2f0-8ba48080ec72 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1129.772910] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: ede967c0-ec3a-4f26-8290-0ee36890cd75] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1129.788818] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101737, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.791386] env[63538]: DEBUG nova.compute.manager [req-c4ebeb51-00b6-49a2-808f-e3d3e3455181 req-5d5b3928-7a7f-40fa-bd18-bb863723c687 service nova] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Received event network-changed-fec44204-572f-48af-b2f0-8ba48080ec72 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1129.791667] env[63538]: DEBUG nova.compute.manager [req-c4ebeb51-00b6-49a2-808f-e3d3e3455181 req-5d5b3928-7a7f-40fa-bd18-bb863723c687 service nova] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Refreshing instance network info cache due to event network-changed-fec44204-572f-48af-b2f0-8ba48080ec72. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1129.791826] env[63538]: DEBUG oslo_concurrency.lockutils [req-c4ebeb51-00b6-49a2-808f-e3d3e3455181 req-5d5b3928-7a7f-40fa-bd18-bb863723c687 service nova] Acquiring lock "refresh_cache-4e07fbfb-cae0-440d-8f75-c76cce3f7d00" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.791951] env[63538]: DEBUG oslo_concurrency.lockutils [req-c4ebeb51-00b6-49a2-808f-e3d3e3455181 req-5d5b3928-7a7f-40fa-bd18-bb863723c687 service nova] Acquired lock "refresh_cache-4e07fbfb-cae0-440d-8f75-c76cce3f7d00" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.792138] env[63538]: DEBUG nova.network.neutron [req-c4ebeb51-00b6-49a2-808f-e3d3e3455181 req-5d5b3928-7a7f-40fa-bd18-bb863723c687 service nova] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Refreshing network info cache for port fec44204-572f-48af-b2f0-8ba48080ec72 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1130.266742] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Acquiring lock "refresh_cache-4e07fbfb-cae0-440d-8f75-c76cce3f7d00" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1130.276566] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: f9fa5578-acf3-416f-9cb0-8ceb00e5132d] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1130.289900] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101737, 'name': ReconfigVM_Task, 'duration_secs': 0.871811} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.291206] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 12beddad-1f19-4cee-b885-3079e3603ba3/12beddad-1f19-4cee-b885-3079e3603ba3.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1130.291206] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41562554-a8b1-4700-a9eb-247fa3b06166 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.298987] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1130.298987] env[63538]: value = "task-5101738" [ 1130.298987] env[63538]: _type = "Task" [ 1130.298987] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.308525] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101738, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.342773] env[63538]: DEBUG nova.network.neutron [req-c4ebeb51-00b6-49a2-808f-e3d3e3455181 req-5d5b3928-7a7f-40fa-bd18-bb863723c687 service nova] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1130.440443] env[63538]: DEBUG nova.network.neutron [req-c4ebeb51-00b6-49a2-808f-e3d3e3455181 req-5d5b3928-7a7f-40fa-bd18-bb863723c687 service nova] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.482260] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "3d80dc17-e330-4575-8e12-e06d8e76274a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.482576] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.482782] env[63538]: DEBUG nova.compute.manager [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Going to confirm migration 7 {{(pid=63538) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1130.784646] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 8fb62f47-cbf2-4b46-bc33-845e832f9ef0] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1130.810617] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101738, 'name': Rename_Task, 'duration_secs': 0.185658} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.810872] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1130.811162] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d1da30e-1597-43d5-af04-9bfd1aeecd47 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.820817] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1130.820817] env[63538]: value = "task-5101739" [ 1130.820817] env[63538]: _type = "Task" [ 1130.820817] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.830416] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101739, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.943984] env[63538]: DEBUG oslo_concurrency.lockutils [req-c4ebeb51-00b6-49a2-808f-e3d3e3455181 req-5d5b3928-7a7f-40fa-bd18-bb863723c687 service nova] Releasing lock "refresh_cache-4e07fbfb-cae0-440d-8f75-c76cce3f7d00" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1130.944361] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Acquired lock "refresh_cache-4e07fbfb-cae0-440d-8f75-c76cce3f7d00" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.944581] env[63538]: DEBUG nova.network.neutron [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1131.055985] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1131.056206] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.056393] env[63538]: DEBUG nova.network.neutron [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 
tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1131.056768] env[63538]: DEBUG nova.objects.instance [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'info_cache' on Instance uuid 3d80dc17-e330-4575-8e12-e06d8e76274a {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1131.288632] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 2e1b0bc7-3909-48e2-b9be-26822a57ee67] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1131.332047] env[63538]: DEBUG oslo_vmware.api [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101739, 'name': PowerOnVM_Task, 'duration_secs': 0.4885} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.332509] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1131.332891] env[63538]: DEBUG nova.compute.manager [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1131.333614] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27107f9b-632a-4835-b052-970757ab38ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.483533] env[63538]: DEBUG nova.network.neutron [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1131.624958] env[63538]: DEBUG nova.network.neutron [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Updating instance_info_cache with network_info: [{"id": "fec44204-572f-48af-b2f0-8ba48080ec72", "address": "fa:16:3e:a2:6c:91", "network": {"id": "68a99c8d-dff3-4242-9c42-eae1c3a638e3", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1111429219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65bc1bc418144f9bbdef50c18595311c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "81d39ad2-4e62-4f09-a567-88ac5aa70467", "external-id": "nsx-vlan-transportzone-221", "segmentation_id": 221, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfec44204-57", "ovs_interfaceid": "fec44204-572f-48af-b2f0-8ba48080ec72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.792848] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: c8a02fa6-5232-4dde-b6dd-0da1089b6bbf] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1131.820663] env[63538]: DEBUG oslo_concurrency.lockutils [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "interface-42af31f3-a9d0-4fdd-99fa-442ebe915277-880907a0-da53-40af-a1ad-126b284f384c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.820940] env[63538]: DEBUG oslo_concurrency.lockutils [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-42af31f3-a9d0-4fdd-99fa-442ebe915277-880907a0-da53-40af-a1ad-126b284f384c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.821423] env[63538]: DEBUG nova.objects.instance [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'flavor' on Instance uuid 42af31f3-a9d0-4fdd-99fa-442ebe915277 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1131.851965] env[63538]: DEBUG oslo_concurrency.lockutils [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.852240] env[63538]: DEBUG oslo_concurrency.lockutils [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.852424] env[63538]: DEBUG nova.objects.instance [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63538) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1132.064894] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquiring lock "12beddad-1f19-4cee-b885-3079e3603ba3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.065465] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Lock "12beddad-1f19-4cee-b885-3079e3603ba3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.065871] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquiring lock "12beddad-1f19-4cee-b885-3079e3603ba3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.066204] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Lock "12beddad-1f19-4cee-b885-3079e3603ba3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.066460] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Lock "12beddad-1f19-4cee-b885-3079e3603ba3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.074109] env[63538]: INFO nova.compute.manager [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Terminating instance [ 1132.077612] 
env[63538]: DEBUG oslo_concurrency.lockutils [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquiring lock "refresh_cache-12beddad-1f19-4cee-b885-3079e3603ba3" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1132.077990] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquired lock "refresh_cache-12beddad-1f19-4cee-b885-3079e3603ba3" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.078326] env[63538]: DEBUG nova.network.neutron [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1132.128644] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Releasing lock "refresh_cache-4e07fbfb-cae0-440d-8f75-c76cce3f7d00" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.128803] env[63538]: DEBUG nova.compute.manager [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Instance network_info: |[{"id": "fec44204-572f-48af-b2f0-8ba48080ec72", "address": "fa:16:3e:a2:6c:91", "network": {"id": "68a99c8d-dff3-4242-9c42-eae1c3a638e3", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1111429219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65bc1bc418144f9bbdef50c18595311c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "81d39ad2-4e62-4f09-a567-88ac5aa70467", "external-id": "nsx-vlan-transportzone-221", "segmentation_id": 221, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfec44204-57", "ovs_interfaceid": "fec44204-572f-48af-b2f0-8ba48080ec72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1132.129432] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:6c:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '81d39ad2-4e62-4f09-a567-88ac5aa70467', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fec44204-572f-48af-b2f0-8ba48080ec72', 
'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1132.138695] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Creating folder: Project (65bc1bc418144f9bbdef50c18595311c). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1132.139161] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b57e6a70-eda5-4305-92a6-b813acf0ebdd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.153023] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Created folder: Project (65bc1bc418144f9bbdef50c18595311c) in parent group-v992234. [ 1132.154119] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Creating folder: Instances. Parent ref: group-v992522. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1132.154119] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74ed198a-1dec-4c68-b175-0609ec9c9cf9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.165499] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Created folder: Instances in parent group-v992522. [ 1132.165943] env[63538]: DEBUG oslo.service.loopingcall [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1132.166227] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1132.166404] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16a09b54-8d9d-4b08-a6cc-e300683fda96 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.187592] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1132.187592] env[63538]: value = "task-5101742" [ 1132.187592] env[63538]: _type = "Task" [ 1132.187592] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.198392] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101742, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.296773] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.296967] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Cleaning up deleted instances with incomplete migration {{(pid=63538) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11355}} [ 1132.381587] env[63538]: DEBUG nova.network.neutron [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance_info_cache with network_info: [{"id": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "address": "fa:16:3e:90:72:79", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bf5c751-02", "ovs_interfaceid": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.513318] env[63538]: DEBUG nova.objects.instance [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'pci_requests' on Instance uuid 42af31f3-a9d0-4fdd-99fa-442ebe915277 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1132.603044] env[63538]: DEBUG nova.network.neutron [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1132.660922] env[63538]: DEBUG nova.network.neutron [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.701235] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101742, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.802488] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.865793] env[63538]: DEBUG oslo_concurrency.lockutils [None req-83f0d77c-7956-4f37-94ea-9f1378db215b tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.887629] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.887916] env[63538]: DEBUG nova.objects.instance [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'migration_context' on Instance uuid 3d80dc17-e330-4575-8e12-e06d8e76274a {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.017420] env[63538]: DEBUG nova.objects.base [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Object Instance<42af31f3-a9d0-4fdd-99fa-442ebe915277> lazy-loaded attributes: flavor,pci_requests {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1133.017632] env[63538]: DEBUG nova.network.neutron [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1133.137164] env[63538]: DEBUG nova.policy [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ebe77c0e32ce4a32b290ba5088e107f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7063c42297c24f2baf7271fa25dec927', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1133.164956] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Releasing lock "refresh_cache-12beddad-1f19-4cee-b885-3079e3603ba3" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.165470] env[63538]: DEBUG nova.compute.manager [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 
tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1133.165689] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1133.166669] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea354e52-8518-4aaf-922a-f6712c8e760b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.177030] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1133.177530] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ba19965-3f63-42ff-b91c-5ce97b55046f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.184876] env[63538]: DEBUG oslo_vmware.api [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1133.184876] env[63538]: value = "task-5101743" [ 1133.184876] env[63538]: _type = "Task" [ 1133.184876] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.201101] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101742, 'name': CreateVM_Task, 'duration_secs': 0.664421} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.201402] env[63538]: DEBUG oslo_vmware.api [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101743, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.201574] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1133.202346] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.202523] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.202898] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1133.203200] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93a03900-13c0-4e7b-9cf8-39cf067abad2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.209635] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Waiting for the task: (returnval){ [ 1133.209635] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52dd19df-7719-0342-7389-1250fd301649" [ 1133.209635] env[63538]: _type = "Task" [ 1133.209635] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.222069] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52dd19df-7719-0342-7389-1250fd301649, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.392346] env[63538]: DEBUG nova.objects.base [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Object Instance<3d80dc17-e330-4575-8e12-e06d8e76274a> lazy-loaded attributes: info_cache,migration_context {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1133.392346] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e6f45d-0a56-472f-8f70-25e716c980ec {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.413437] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c9b2958-389f-4289-a1a2-95e9b43e14f0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.420803] env[63538]: DEBUG oslo_vmware.api [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1133.420803] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5293a524-8c9d-a3c3-4cbe-36df97d95163" [ 1133.420803] env[63538]: _type = "Task" [ 1133.420803] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.429818] env[63538]: DEBUG oslo_vmware.api [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5293a524-8c9d-a3c3-4cbe-36df97d95163, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.698412] env[63538]: DEBUG oslo_vmware.api [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101743, 'name': PowerOffVM_Task, 'duration_secs': 0.160542} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.698790] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1133.698980] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1133.699272] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b843a533-7272-48ab-b106-75d861ae6a7f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.722227] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52dd19df-7719-0342-7389-1250fd301649, 'name': SearchDatastore_Task, 'duration_secs': 0.013914} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.722751] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.722896] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1133.723212] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.723371] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.723580] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1133.725212] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f3086a3-ee6d-4994-a264-6cd6edecff9f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.728875] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1133.729134] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1133.729379] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Deleting the datastore file [datastore1] 12beddad-1f19-4cee-b885-3079e3603ba3 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1133.729729] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a91551cb-9e6c-4b3c-b282-114205e8c21d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.740187] env[63538]: DEBUG oslo_vmware.api [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for the task: (returnval){ [ 1133.740187] env[63538]: value = "task-5101745" [ 1133.740187] env[63538]: _type = "Task" [ 1133.740187] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.745746] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1133.745908] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1133.747720] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47ceb3be-1799-42b2-9b64-827e3fc3f894 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.755383] env[63538]: DEBUG oslo_vmware.api [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101745, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.760544] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Waiting for the task: (returnval){ [ 1133.760544] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c462e-b836-e77c-8008-3a4b5cb2050f" [ 1133.760544] env[63538]: _type = "Task" [ 1133.760544] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.772443] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c462e-b836-e77c-8008-3a4b5cb2050f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.931920] env[63538]: DEBUG oslo_vmware.api [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5293a524-8c9d-a3c3-4cbe-36df97d95163, 'name': SearchDatastore_Task, 'duration_secs': 0.008923} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.932596] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.932596] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.256030] env[63538]: DEBUG oslo_vmware.api [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Task: {'id': task-5101745, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122634} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.256030] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1134.256030] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1134.256030] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1134.256030] env[63538]: INFO nova.compute.manager [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1134.256030] env[63538]: DEBUG oslo.service.loopingcall [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1134.256030] env[63538]: DEBUG nova.compute.manager [-] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1134.256030] env[63538]: DEBUG nova.network.neutron [-] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1134.272522] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]527c462e-b836-e77c-8008-3a4b5cb2050f, 'name': SearchDatastore_Task, 'duration_secs': 0.021347} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.273716] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-218de0a8-d279-4a9f-b97a-458ffcf52f56 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.283303] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Waiting for the task: (returnval){ [ 1134.283303] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52443cea-382e-60d1-6b95-778546d5e12a" [ 1134.283303] env[63538]: _type = "Task" [ 1134.283303] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.291750] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52443cea-382e-60d1-6b95-778546d5e12a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.306062] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.306062] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.415611] env[63538]: DEBUG nova.network.neutron [-] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1134.481279] env[63538]: DEBUG oslo_concurrency.lockutils [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "048573b4-26db-4a62-81e0-1bc1c3999d02" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.481279] env[63538]: DEBUG oslo_concurrency.lockutils [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.003s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.612289] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01c422f-acae-450a-8f08-8383a2b17d72 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.621431] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1bf531-6f78-43e6-9374-46cb3d3fa366 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.655249] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb359c1-0f62-4b94-9e2b-549f79aace40 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.664934] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6278923-f927-4f07-9cfd-ac4cfe4b42a2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.681615] env[63538]: DEBUG nova.compute.provider_tree [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 
tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.792289] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52443cea-382e-60d1-6b95-778546d5e12a, 'name': SearchDatastore_Task, 'duration_secs': 0.012766} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.792577] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1134.792888] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 4e07fbfb-cae0-440d-8f75-c76cce3f7d00/4e07fbfb-cae0-440d-8f75-c76cce3f7d00.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1134.793185] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8c2bc11-43e0-4ca8-bd5b-0d033de8fa0d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.802194] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Waiting for the task: (returnval){ [ 1134.802194] env[63538]: value = "task-5101746" [ 1134.802194] env[63538]: _type = "Task" [ 1134.802194] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.811300] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.811474] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Starting heal instance info cache {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 1134.811574] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Rebuilding the list of instances to heal {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10021}} [ 1134.821372] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101746, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.918051] env[63538]: DEBUG nova.network.neutron [-] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.960384] env[63538]: DEBUG nova.network.neutron [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Successfully updated port: 880907a0-da53-40af-a1ad-126b284f384c {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1134.990574] env[63538]: DEBUG nova.compute.utils [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1135.185983] env[63538]: DEBUG nova.scheduler.client.report [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1135.313969] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101746, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.325311] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Skipping network cache update for instance because it is being deleted. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10034}} [ 1135.325575] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Skipping network cache update for instance because it is Building. 
{{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10030}} [ 1135.356507] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.356777] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquired lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.357114] env[63538]: DEBUG nova.network.neutron [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Forcefully refreshing network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1135.357114] env[63538]: DEBUG nova.objects.instance [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lazy-loading 'info_cache' on Instance uuid fb26fb32-a420-4667-850c-e32786edd8f2 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1135.420361] env[63538]: INFO nova.compute.manager [-] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Took 1.17 seconds to deallocate network for instance. [ 1135.464272] env[63538]: DEBUG oslo_concurrency.lockutils [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.464615] env[63538]: DEBUG oslo_concurrency.lockutils [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.468021] env[63538]: DEBUG nova.network.neutron [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1135.494782] env[63538]: DEBUG oslo_concurrency.lockutils [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.814108] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101746, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604635} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.814457] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 4e07fbfb-cae0-440d-8f75-c76cce3f7d00/4e07fbfb-cae0-440d-8f75-c76cce3f7d00.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1135.814721] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1135.815042] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ced6619a-9f54-4269-9866-f75116c37554 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.822855] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Waiting for the task: (returnval){ [ 1135.822855] env[63538]: value = "task-5101747" [ 1135.822855] env[63538]: _type = "Task" [ 1135.822855] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.832154] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101747, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.927410] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.026696] env[63538]: WARNING nova.network.neutron [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] 44a04a43-a979-4648-89b2-63323df5b0f3 already exists in list: networks containing: ['44a04a43-a979-4648-89b2-63323df5b0f3']. 
ignoring it [ 1136.201264] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.268s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.201417] env[63538]: DEBUG nova.compute.manager [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=63538) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4910}} [ 1136.204863] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.278s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.205125] env[63538]: DEBUG nova.objects.instance [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Lazy-loading 'resources' on Instance uuid 12beddad-1f19-4cee-b885-3079e3603ba3 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.234738] env[63538]: DEBUG nova.compute.manager [req-a7ace347-8a13-4dc9-b4fb-82b5348323ec req-05acd4dd-ba3a-4a3c-9567-19fa14db92a3 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Received event network-vif-plugged-880907a0-da53-40af-a1ad-126b284f384c {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1136.234738] env[63538]: DEBUG oslo_concurrency.lockutils [req-a7ace347-8a13-4dc9-b4fb-82b5348323ec req-05acd4dd-ba3a-4a3c-9567-19fa14db92a3 service nova] Acquiring lock "42af31f3-a9d0-4fdd-99fa-442ebe915277-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.235028] env[63538]: DEBUG oslo_concurrency.lockutils [req-a7ace347-8a13-4dc9-b4fb-82b5348323ec req-05acd4dd-ba3a-4a3c-9567-19fa14db92a3 service nova] Lock "42af31f3-a9d0-4fdd-99fa-442ebe915277-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.235223] env[63538]: DEBUG oslo_concurrency.lockutils [req-a7ace347-8a13-4dc9-b4fb-82b5348323ec req-05acd4dd-ba3a-4a3c-9567-19fa14db92a3 service nova] Lock "42af31f3-a9d0-4fdd-99fa-442ebe915277-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.235415] env[63538]: DEBUG nova.compute.manager [req-a7ace347-8a13-4dc9-b4fb-82b5348323ec req-05acd4dd-ba3a-4a3c-9567-19fa14db92a3 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] No waiting events found dispatching network-vif-plugged-880907a0-da53-40af-a1ad-126b284f384c {{(pid=63538) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1136.236137] env[63538]: WARNING nova.compute.manager [req-a7ace347-8a13-4dc9-b4fb-82b5348323ec req-05acd4dd-ba3a-4a3c-9567-19fa14db92a3 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Received unexpected event network-vif-plugged-880907a0-da53-40af-a1ad-126b284f384c for instance with vm_state active and task_state None. [ 1136.340570] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101747, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084644} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.340882] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1136.341771] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c1c044-2f20-4f01-93c9-f4c4ebe564e3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.373019] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 4e07fbfb-cae0-440d-8f75-c76cce3f7d00/4e07fbfb-cae0-440d-8f75-c76cce3f7d00.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1136.376444] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9037ff85-d7d8-4389-b9b5-661693eb5415 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.402543] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Waiting for the task: (returnval){ [ 1136.402543] env[63538]: value = "task-5101748" [ 1136.402543] env[63538]: _type = "Task" [ 1136.402543] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.413865] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101748, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.462600] env[63538]: DEBUG nova.network.neutron [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updating instance_info_cache with network_info: [{"id": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "address": "fa:16:3e:f0:24:40", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfaa4640-ae", "ovs_interfaceid": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "880907a0-da53-40af-a1ad-126b284f384c", "address": "fa:16:3e:f4:b1:8b", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap880907a0-da", "ovs_interfaceid": "880907a0-da53-40af-a1ad-126b284f384c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.570192] env[63538]: DEBUG oslo_concurrency.lockutils [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "048573b4-26db-4a62-81e0-1bc1c3999d02" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.571032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 
tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.571032] env[63538]: INFO nova.compute.manager [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Attaching volume c24d91c9-a15c-486f-a3b8-f0c4b143cda1 to /dev/sdb [ 1136.614450] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996f0344-eaa6-4bab-8760-9c5779fe2dc3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.624403] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652b6311-28aa-4a4b-901a-2d9d0eb2b332 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.644958] env[63538]: DEBUG nova.virt.block_device [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Updating existing volume attachment record: aaf57902-5c76-468a-879e-bedae3cb068d {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1136.783691] env[63538]: INFO nova.scheduler.client.report [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleted allocation for migration 86475eec-5cad-4cff-8794-87181adb12b6 [ 1136.894870] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8fbb99f-4ec9-4249-933f-77726cae480d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.906926] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ee5ba7-e4c2-421e-b613-b6e3e0456f9c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.915679] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101748, 'name': ReconfigVM_Task, 'duration_secs': 0.362451} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.943366] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 4e07fbfb-cae0-440d-8f75-c76cce3f7d00/4e07fbfb-cae0-440d-8f75-c76cce3f7d00.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1136.944381] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0bdfd72-7739-4a91-9165-0776024bff6a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.946859] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e62966b-18fe-41e6-9a22-e3820b739bae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.960273] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dda4c12-a5e1-4597-b5c0-6b8a48722d06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.964485] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Waiting for the task: (returnval){ [ 1136.964485] env[63538]: value = "task-5101752" [ 1136.964485] env[63538]: _type = "Task" [ 1136.964485] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.965448] env[63538]: DEBUG oslo_concurrency.lockutils [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.966065] env[63538]: DEBUG oslo_concurrency.lockutils [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1136.966271] env[63538]: DEBUG oslo_concurrency.lockutils [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.979411] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b153530-e378-4518-b930-2e4bbee4d390 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.983306] env[63538]: DEBUG nova.compute.provider_tree [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1136.991885] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101752, 'name': Rename_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.005266] env[63538]: DEBUG nova.virt.hardware [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1137.005561] env[63538]: DEBUG nova.virt.hardware [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1137.005725] env[63538]: DEBUG nova.virt.hardware [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1137.005970] env[63538]: DEBUG nova.virt.hardware [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1137.006177] env[63538]: DEBUG nova.virt.hardware [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1137.006376] env[63538]: DEBUG nova.virt.hardware [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1137.006662] env[63538]: DEBUG nova.virt.hardware [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1137.006846] env[63538]: DEBUG nova.virt.hardware [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1137.007056] env[63538]: DEBUG nova.virt.hardware [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Got 1 possible 
topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1137.007234] env[63538]: DEBUG nova.virt.hardware [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1137.007412] env[63538]: DEBUG nova.virt.hardware [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1137.014312] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Reconfiguring VM to attach interface {{(pid=63538) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 1137.015516] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e785a6ae-d912-4447-8fc5-234c5edf1164 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.039925] env[63538]: DEBUG oslo_vmware.api [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1137.039925] env[63538]: value = "task-5101753" [ 1137.039925] env[63538]: _type = "Task" [ 1137.039925] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.052179] env[63538]: DEBUG oslo_vmware.api [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101753, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.265729] env[63538]: DEBUG nova.network.neutron [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Updating instance_info_cache with network_info: [{"id": "5250918c-5112-49ad-b1d3-f73c2d534637", "address": "fa:16:3e:6d:44:6f", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5250918c-51", "ovs_interfaceid": "5250918c-5112-49ad-b1d3-f73c2d534637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.291599] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ac61e702-dcab-47ab-8590-360fe88b5b58 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.809s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.477157] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101752, 'name': Rename_Task, 'duration_secs': 0.176796} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.479311] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1137.483920] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7daa97d8-0b58-441b-8b48-172cdcc1e66e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.485342] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Acquiring lock "0599fa68-1109-4edf-b42e-f81e7f09d641" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.485782] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Lock "0599fa68-1109-4edf-b42e-f81e7f09d641" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.488546] env[63538]: DEBUG nova.scheduler.client.report [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1137.502772] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Waiting for the task: (returnval){ [ 1137.502772] env[63538]: value = "task-5101754" [ 1137.502772] env[63538]: _type = "Task" [ 1137.502772] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.519665] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101754, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.554953] env[63538]: DEBUG oslo_vmware.api [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101753, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.770698] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Releasing lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.770698] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Updated the network info_cache for instance {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10088}} [ 1137.770698] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1137.770698] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1137.770698] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1137.770698] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1137.770698] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1137.770698] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1137.770698] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63538) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10636}} [ 1137.770698] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1137.997191] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.792s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.000773] env[63538]: DEBUG nova.compute.manager [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1138.018731] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101754, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.040046] env[63538]: INFO nova.scheduler.client.report [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Deleted allocations for instance 12beddad-1f19-4cee-b885-3079e3603ba3 [ 1138.055608] env[63538]: DEBUG oslo_vmware.api [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101753, 'name': ReconfigVM_Task, 'duration_secs': 0.658144} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.056180] env[63538]: DEBUG oslo_concurrency.lockutils [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1138.056451] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Reconfigured VM to attach interface {{(pid=63538) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 1138.235965] env[63538]: DEBUG nova.objects.instance [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'flavor' on Instance uuid 3d80dc17-e330-4575-8e12-e06d8e76274a {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.273671] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.273920] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.274103] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.274262] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63538) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1138.275312] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbed3a5-1d89-4d51-8d60-e001a5658bf1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.286525] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfcbddf-b55f-47ca-a676-91857656d228 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.291930] env[63538]: DEBUG nova.compute.manager [req-466498cd-a9aa-4232-84a6-126c47137a73 req-46805674-4284-4a8b-8cf3-28efec8e2e03 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Received event network-changed-880907a0-da53-40af-a1ad-126b284f384c {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1138.293241] env[63538]: DEBUG nova.compute.manager 
[req-466498cd-a9aa-4232-84a6-126c47137a73 req-46805674-4284-4a8b-8cf3-28efec8e2e03 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Refreshing instance network info cache due to event network-changed-880907a0-da53-40af-a1ad-126b284f384c. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1138.293241] env[63538]: DEBUG oslo_concurrency.lockutils [req-466498cd-a9aa-4232-84a6-126c47137a73 req-46805674-4284-4a8b-8cf3-28efec8e2e03 service nova] Acquiring lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1138.293241] env[63538]: DEBUG oslo_concurrency.lockutils [req-466498cd-a9aa-4232-84a6-126c47137a73 req-46805674-4284-4a8b-8cf3-28efec8e2e03 service nova] Acquired lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.293241] env[63538]: DEBUG nova.network.neutron [req-466498cd-a9aa-4232-84a6-126c47137a73 req-46805674-4284-4a8b-8cf3-28efec8e2e03 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Refreshing network info cache for port 880907a0-da53-40af-a1ad-126b284f384c {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1138.307333] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e650fa1f-b80f-4685-bffd-7343d5a06789 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.316265] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97114fcd-d770-43e0-825c-2c8b7a753975 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.349653] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179256MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=63538) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1138.349845] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.350063] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.521884] env[63538]: DEBUG oslo_vmware.api [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101754, 'name': PowerOnVM_Task, 'duration_secs': 0.572357} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.522117] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1138.522337] env[63538]: INFO nova.compute.manager [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Took 9.81 seconds to spawn the instance on the hypervisor. [ 1138.522966] env[63538]: DEBUG nova.compute.manager [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1138.523403] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c22406-2a50-4bb1-b2c9-e0f0612eb271 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.535066] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.554744] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1d44fc9b-ffd6-43f0-955d-99759a220047 tempest-ServerShowV254Test-503485431 tempest-ServerShowV254Test-503485431-project-member] Lock "12beddad-1f19-4cee-b885-3079e3603ba3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.489s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.563331] env[63538]: DEBUG oslo_concurrency.lockutils [None req-068ac987-3ff5-4bec-a750-03120be5a2f5 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-42af31f3-a9d0-4fdd-99fa-442ebe915277-880907a0-da53-40af-a1ad-126b284f384c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.742s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.740850] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1138.741047] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.741242] 
env[63538]: DEBUG nova.network.neutron [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1138.741429] env[63538]: DEBUG nova.objects.instance [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'info_cache' on Instance uuid 3d80dc17-e330-4575-8e12-e06d8e76274a {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.043918] env[63538]: INFO nova.compute.manager [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Took 14.70 seconds to build instance. [ 1139.107025] env[63538]: DEBUG nova.network.neutron [req-466498cd-a9aa-4232-84a6-126c47137a73 req-46805674-4284-4a8b-8cf3-28efec8e2e03 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updated VIF entry in instance network info cache for port 880907a0-da53-40af-a1ad-126b284f384c. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1139.107580] env[63538]: DEBUG nova.network.neutron [req-466498cd-a9aa-4232-84a6-126c47137a73 req-46805674-4284-4a8b-8cf3-28efec8e2e03 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updating instance_info_cache with network_info: [{"id": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "address": "fa:16:3e:f0:24:40", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfaa4640-ae", "ovs_interfaceid": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "880907a0-da53-40af-a1ad-126b284f384c", "address": "fa:16:3e:f4:b1:8b", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap880907a0-da", "ovs_interfaceid": "880907a0-da53-40af-a1ad-126b284f384c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.245601] env[63538]: DEBUG nova.objects.base [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Object Instance<3d80dc17-e330-4575-8e12-e06d8e76274a> lazy-loaded attributes: flavor,info_cache {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1139.386886] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance fb26fb32-a420-4667-850c-e32786edd8f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1139.387075] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 42af31f3-a9d0-4fdd-99fa-442ebe915277 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1139.387207] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 048573b4-26db-4a62-81e0-1bc1c3999d02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1139.387331] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 4387a3ec-0f0b-4917-97f3-08c737bee4e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1139.387456] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance d00151c1-ca34-4c57-9ed2-74d506a0cffb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1139.387576] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance e0d5a3b2-21e1-4de0-ac10-1a5687a60c10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1139.387734] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance aaf52cad-86fd-42df-8ee3-13724e3f5e94 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1139.387866] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 3d80dc17-e330-4575-8e12-e06d8e76274a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1139.388024] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 4e07fbfb-cae0-440d-8f75-c76cce3f7d00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1139.546615] env[63538]: DEBUG oslo_concurrency.lockutils [None req-4a6ca8aa-b09e-4fb6-953d-9d19a2a91e76 tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Lock "4e07fbfb-cae0-440d-8f75-c76cce3f7d00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.209s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.610679] env[63538]: DEBUG oslo_concurrency.lockutils [req-466498cd-a9aa-4232-84a6-126c47137a73 req-46805674-4284-4a8b-8cf3-28efec8e2e03 service nova] Releasing lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.891997] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 0599fa68-1109-4edf-b42e-f81e7f09d641 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1139.892609] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1139.892828] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2304MB phys_disk=100GB used_disk=8GB total_vcpus=48 used_vcpus=9 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '9', 'num_vm_active': '7', 'num_task_None': '7', 'num_os_type_None': '9', 'num_proj_df090f9a727d4cf4a0f466e27928bdc6': '2', 'io_workload': '1', 'num_vm_stopped': '1', 'num_task_powering-on': '1', 'num_proj_1a06b7cc1ab24ba584bbe970e4fc5e81': '1', 'num_proj_7063c42297c24f2baf7271fa25dec927': '2', 'num_proj_1fe11c1386b14d139f4416cbf20fb201': '1', 'num_proj_0d6954a5254f441ca256c85330297cef': '1', 'num_proj_eaf2a7f857d342f1923d44141fa59cfe': '1', 'num_vm_building': '1', 'num_task_spawning': '1', 'num_proj_65bc1bc418144f9bbdef50c18595311c': '1'} {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1140.006974] env[63538]: DEBUG nova.network.neutron [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance_info_cache with network_info: [{"id": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "address": "fa:16:3e:90:72:79", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bf5c751-02", "ovs_interfaceid": "2bf5c751-02ce-4e9e-8e98-68c3505b8aec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.052909] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6767ff16-e531-4a0c-a953-05b55bc0b96f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.061613] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2642b1-093a-4bd2-a4a7-e4ef8ce02bbf {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.096807] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "interface-42af31f3-a9d0-4fdd-99fa-442ebe915277-880907a0-da53-40af-a1ad-126b284f384c" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.097056] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-42af31f3-a9d0-4fdd-99fa-442ebe915277-880907a0-da53-40af-a1ad-126b284f384c" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.099951] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3300b375-cd27-4a87-9810-e654567ec099 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.110839] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f794b82-d36c-4e42-8447-a33da93f691b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.128665] env[63538]: DEBUG nova.compute.provider_tree [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1140.512436] env[63538]: DEBUG oslo_concurrency.lockutils [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "refresh_cache-3d80dc17-e330-4575-8e12-e06d8e76274a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1140.600389] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.600595] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.601637] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb955a25-0a07-42e1-a8f0-1887a387c64e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.627823] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b69fd14-34de-4ca6-9d4f-745a9f9e8dd9 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.631877] env[63538]: DEBUG nova.scheduler.client.report [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1140.671540] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Reconfiguring VM to detach interface {{(pid=63538) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 1140.672529] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e2a722d-0cc1-4553-9fb9-4d82a6130003 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.704332] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1140.704332] env[63538]: value = "task-5101756" [ 1140.704332] env[63538]: _type = "Task" [ 1140.704332] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.716892] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101756, 'name': ReconfigVM_Task} progress is 6%. 
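The resource tracker output above can be cross-checked by hand: the per-instance placement allocations listed earlier are eight instances at 192 MB plus one at 256 MB, eight 1 GB root disks and nine VCPUs, and adding the 512 MB reserved memory from the inventory reproduces the reported final view (used_ram=2304MB, used_disk=8GB, used_vcpus=9). The VCPU allocation_ratio of 4.0 on 48 physical cores likewise gives 192 schedulable VCPUs. A quick sketch of that arithmetic; the allocation values are copied from the log, the bookkeeping itself is illustrative rather than the resource tracker's code.

# Cross-check of the final resource view using the allocations logged above.
allocations = [
    {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},  # fb26fb32
    {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},  # 42af31f3
    {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},  # 048573b4
    {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},  # 4387a3ec
    {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},  # d00151c1
    {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},  # e0d5a3b2
    {"MEMORY_MB": 192, "VCPU": 1},                # aaf52cad (no DISK_GB allocation in the log)
    {"DISK_GB": 1, "MEMORY_MB": 256, "VCPU": 1},  # 3d80dc17
    {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},  # 4e07fbfb
]
reserved_ram_mb = 512          # MEMORY_MB 'reserved' from the inventory data above
used_ram = reserved_ram_mb + sum(a.get("MEMORY_MB", 0) for a in allocations)
used_disk = sum(a.get("DISK_GB", 0) for a in allocations)
used_vcpus = sum(a.get("VCPU", 0) for a in allocations)
print(used_ram, used_disk, used_vcpus)   # 2304 8 9, matching the final resource view
print(48 * 4.0)                          # 192.0 schedulable VCPUs with allocation_ratio=4.0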
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.884596] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Acquiring lock "4e07fbfb-cae0-440d-8f75-c76cce3f7d00" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.885245] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Lock "4e07fbfb-cae0-440d-8f75-c76cce3f7d00" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.885512] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Acquiring lock "4e07fbfb-cae0-440d-8f75-c76cce3f7d00-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.885732] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Lock "4e07fbfb-cae0-440d-8f75-c76cce3f7d00-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.885913] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Lock "4e07fbfb-cae0-440d-8f75-c76cce3f7d00-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.888848] env[63538]: INFO nova.compute.manager [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Terminating instance [ 1140.891282] env[63538]: DEBUG nova.compute.manager [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1140.891473] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1140.892629] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0f5017-faa1-4df2-b771-352fd4721679 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.901228] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1140.901594] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a5d53da-8946-4393-91fa-c5c355af5702 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.909260] env[63538]: DEBUG oslo_vmware.api [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Waiting for the task: (returnval){ [ 1140.909260] env[63538]: value = "task-5101757" [ 1140.909260] env[63538]: _type = "Task" [ 1140.909260] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.919254] env[63538]: DEBUG oslo_vmware.api [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101757, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.018082] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1141.018082] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6eada9c4-dd1c-4091-affa-627553f8fcee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.025277] env[63538]: DEBUG oslo_vmware.api [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1141.025277] env[63538]: value = "task-5101758" [ 1141.025277] env[63538]: _type = "Task" [ 1141.025277] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.037094] env[63538]: DEBUG oslo_vmware.api [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101758, 'name': PowerOnVM_Task} progress is 0%. 
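The task handling around the PowerOffVM_Task/PowerOnVM_Task calls above follows one pattern: submit the vSphere task, then poll it, logging "progress is N%" until it completes and reporting duration_secs. The real loop lives in oslo_vmware.api (wait_for_task/_poll_task, per the paths in the log); the stand-alone sketch below only mimics that shape with a stub task object.

# Simplified sketch of the wait/poll cycle that produces the "Waiting for the task",
# "progress is N%" and "duration_secs" lines above. Not the oslo.vmware code itself.
import time

class FakeTaskInfo:
    """Stand-in for vSphere TaskInfo: state goes queued -> running -> success."""
    def __init__(self):
        self.states = iter(["queued", "running", "running", "success"])
        self.progress = 0
    def poll(self):
        self.progress = min(self.progress + 33, 100)
        return next(self.states)

def wait_for_task(task_info, interval=0.5):
    start = time.time()
    while True:
        state = task_info.poll()                      # one PropertyCollector round-trip in real life
        print(f"progress is {task_info.progress}%")   # mirrors the _poll_task debug lines
        if state == "success":
            return round(time.time() - start, 4)      # mirrors 'duration_secs' in the log
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)

print("duration_secs:", wait_for_task(FakeTaskInfo()))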
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.139374] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63538) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1141.139813] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.790s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.140169] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.605s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.143420] env[63538]: INFO nova.compute.claims [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1141.205467] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Volume attach. 
Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1141.205768] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992526', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'name': 'volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '048573b4-26db-4a62-81e0-1bc1c3999d02', 'attached_at': '', 'detached_at': '', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'serial': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1141.206891] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad80d9f8-ddc6-463a-b53d-20d41c8a798b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.234883] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee2eca2-4958-4a5e-90e7-99f4a26e7edf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.238232] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.265647] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1/volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1141.266472] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-707422cd-f5fd-47a6-9406-859f1b549343 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.286031] env[63538]: DEBUG oslo_vmware.api [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1141.286031] env[63538]: value = "task-5101759" [ 1141.286031] env[63538]: _type = "Task" [ 1141.286031] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.297193] env[63538]: DEBUG oslo_vmware.api [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101759, 'name': ReconfigVM_Task} progress is 5%. 
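The _attach_volume_vmdk call above receives a Cinder connection_info blob whose 'data' section names the backing shadow VM ('vm-992526'), the volume id and the access mode; the subsequent ReconfigVM_Task attaches that VMDK to the instance as a thin-provisioned disk. A small sketch of picking the attach parameters out of such a blob; the dict values are copied from the log, the helper and the assumed datastore path layout are illustrative only.

# Illustrative only: unpack the vmdk connection_info shown above into the pieces
# the attach path needs (backing shadow VM, VMDK path, read-only flag).
connection_info = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-992526",
        "volume_id": "c24d91c9-a15c-486f-a3b8-f0c4b143cda1",
        "name": "volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1",
        "access_mode": "rw",
        "encrypted": False,
    },
}

def attach_params(info):
    if info["driver_volume_type"] != "vmdk":
        raise ValueError("not a vmdk volume")
    data = info["data"]
    return {
        "backing_vm": data["volume"],   # shadow VM that owns the volume's VMDK
        # assumed layout, mirroring the "[datastore1] volume-.../volume-....vmdk" path logged above
        "vmdk_path": f"[datastore1] {data['name']}/{data['name']}.vmdk",
        "read_only": data["access_mode"] == "ro",
    }

print(attach_params(connection_info))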
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.420189] env[63538]: DEBUG oslo_vmware.api [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101757, 'name': PowerOffVM_Task, 'duration_secs': 0.2649} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.420524] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1141.420524] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1141.420804] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1231052-44fb-49c0-af03-0149e9f1dddc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.507904] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1141.509406] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1141.509406] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Deleting the datastore file [datastore1] 4e07fbfb-cae0-440d-8f75-c76cce3f7d00 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1141.509406] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-accd0066-7e1a-4c0f-8698-a31a12a5fe69 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.517017] env[63538]: DEBUG oslo_vmware.api [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Waiting for the task: (returnval){ [ 1141.517017] env[63538]: value = "task-5101761" [ 1141.517017] env[63538]: _type = "Task" [ 1141.517017] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.529170] env[63538]: DEBUG oslo_vmware.api [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101761, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.539026] env[63538]: DEBUG oslo_vmware.api [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101758, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.717601] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.801541] env[63538]: DEBUG oslo_vmware.api [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101759, 'name': ReconfigVM_Task, 'duration_secs': 0.425832} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.801541] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Reconfigured VM instance instance-00000067 to attach disk [datastore1] volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1/volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1141.805825] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75247dc8-5609-4cae-a4aa-97583dfeebfb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.822398] env[63538]: DEBUG oslo_vmware.api [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1141.822398] env[63538]: value = "task-5101762" [ 1141.822398] env[63538]: _type = "Task" [ 1141.822398] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.831900] env[63538]: DEBUG oslo_vmware.api [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101762, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.027421] env[63538]: DEBUG oslo_vmware.api [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Task: {'id': task-5101761, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.445454} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.031355] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1142.031580] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1142.031775] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1142.031957] env[63538]: INFO nova.compute.manager [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1142.032233] env[63538]: DEBUG oslo.service.loopingcall [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1142.033019] env[63538]: DEBUG nova.compute.manager [-] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1142.033160] env[63538]: DEBUG nova.network.neutron [-] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1142.040911] env[63538]: DEBUG oslo_vmware.api [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101758, 'name': PowerOnVM_Task, 'duration_secs': 0.529229} completed successfully. 
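After the power-off, unregister and datastore-file deletion above, the compute manager hands network cleanup to a looping call ("Waiting for function ..._deallocate_network_with_retries to return"). Nova drives that through oslo.service; the stand-alone sketch below only mimics the retry-until-success shape in plain Python, with a stub deallocation function.

# Sketch of the retry wrapper hinted at by the loopingcall line above. Illustrative
# only; Nova's real version uses oslo.service looping calls and retries only
# specific Neutron errors.
import time

def deallocate_with_retries(deallocate, attempts=3, delay=1.0):
    for attempt in range(1, attempts + 1):
        try:
            return deallocate()
        except Exception as exc:
            if attempt == attempts:
                raise
            print(f"deallocate failed ({exc}); retry {attempt}/{attempts - 1}")
            time.sleep(delay)

# Usage with a stub that fails once and then succeeds:
calls = {"n": 0}
def fake_deallocate():
    calls["n"] += 1
    if calls["n"] == 1:
        raise RuntimeError("neutron timeout")
    return "deallocated"

print(deallocate_with_retries(fake_deallocate, delay=0.1))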
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.041268] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1142.041813] env[63538]: DEBUG nova.compute.manager [None req-7b22c106-5578-4fe8-a665-e3ecde777a31 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1142.042379] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474ea339-afd0-47c4-9d69-80d76a4c4003 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.224131] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.333876] env[63538]: DEBUG oslo_vmware.api [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101762, 'name': ReconfigVM_Task, 'duration_secs': 0.159059} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.333876] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992526', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'name': 'volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '048573b4-26db-4a62-81e0-1bc1c3999d02', 'attached_at': '', 'detached_at': '', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'serial': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1142.351749] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54e482d-3b24-4676-a385-2bf0e4276e6c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.360214] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3798e9a-22e3-4bf2-9160-336b425cdde2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.400102] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ec67a1-c4b0-445c-bd01-cb1acc55dd6a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.409085] env[63538]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675473c1-5db7-44b0-8323-ad6d5e7e1b4b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.425758] env[63538]: DEBUG nova.compute.provider_tree [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1142.429493] env[63538]: DEBUG nova.compute.manager [req-dcfb4f91-2123-4ad6-a6db-59168b542b0a req-746533bc-575b-43ba-ae26-e80cbb3d5efd service nova] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Received event network-vif-deleted-fec44204-572f-48af-b2f0-8ba48080ec72 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1142.429708] env[63538]: INFO nova.compute.manager [req-dcfb4f91-2123-4ad6-a6db-59168b542b0a req-746533bc-575b-43ba-ae26-e80cbb3d5efd service nova] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Neutron deleted interface fec44204-572f-48af-b2f0-8ba48080ec72; detaching it from the instance and deleting it from the info cache [ 1142.429890] env[63538]: DEBUG nova.network.neutron [req-dcfb4f91-2123-4ad6-a6db-59168b542b0a req-746533bc-575b-43ba-ae26-e80cbb3d5efd service nova] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.723134] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101756, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.904291] env[63538]: DEBUG nova.network.neutron [-] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.934032] env[63538]: DEBUG nova.scheduler.client.report [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1142.941228] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2c4670c-a63e-4297-bd16-ee62a99f5e16 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.951233] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517a2d2c-ad56-4ea0-90a1-eeabf9989e5f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.988209] env[63538]: DEBUG nova.compute.manager [req-dcfb4f91-2123-4ad6-a6db-59168b542b0a req-746533bc-575b-43ba-ae26-e80cbb3d5efd service nova] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Detach interface failed, port_id=fec44204-572f-48af-b2f0-8ba48080ec72, reason: Instance 4e07fbfb-cae0-440d-8f75-c76cce3f7d00 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1143.222066] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101756, 'name': ReconfigVM_Task} progress is 14%. 
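The network-vif-deleted event above shows the other direction of cache maintenance: when Neutron deletes a port, Nova removes the matching entry from the instance's info cache, and the detach is simply skipped when the instance has already been destroyed (the "could not be found" line). A sketch of that pruning step; the real handler is _process_instance_vif_deleted_event in nova/compute/manager.py as logged, and this helper is only an illustration of the effect.

# Prune a deleted Neutron port from a cached network_info list; tolerate the
# instance having been deleted already, as in the log above. Illustrative only.
def remove_vif_from_cache(network_info, port_id, instance_exists=True):
    if not instance_exists:
        print(f"Detach interface skipped, port_id={port_id}: instance could not be found")
        return network_info
    return [vif for vif in network_info if vif["id"] != port_id]

cache = [{"id": "fec44204-572f-48af-b2f0-8ba48080ec72"}]
print(remove_vif_from_cache(cache, "fec44204-572f-48af-b2f0-8ba48080ec72"))
# -> [] , matching the "Updating instance_info_cache with network_info: []" line
print(remove_vif_from_cache(cache, "fec44204-572f-48af-b2f0-8ba48080ec72", instance_exists=False))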
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.244614] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "3d80dc17-e330-4575-8e12-e06d8e76274a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.244972] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.245203] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "3d80dc17-e330-4575-8e12-e06d8e76274a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.245437] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.245735] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.247934] env[63538]: INFO nova.compute.manager [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Terminating instance [ 1143.249791] env[63538]: DEBUG nova.compute.manager [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Start destroying the instance on the hypervisor. 
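The lockutils lines above ("acquired ... waited N s", "released ... held N s") all come from one pattern: a named in-process lock whose wait and hold times are measured and logged. The real implementation is oslo_concurrency.lockutils, as the file paths in the log show; the threading-based sketch below only reproduces the shape of that bookkeeping.

# Named-lock bookkeeping in the style of the oslo_concurrency.lockutils lines above:
# log how long we waited to acquire and how long we held the lock. Sketch only.
import threading, time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name, owner):
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {t1 - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {time.monotonic() - t1:.3f}s')

with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.05)   # work done under the lock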
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1143.250044] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1143.250866] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfa2318-8d95-4563-aa97-26ff527a3a79 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.259699] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1143.259996] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03c93889-9428-413a-b10d-af39f5a93450 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.267600] env[63538]: DEBUG oslo_vmware.api [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1143.267600] env[63538]: value = "task-5101763" [ 1143.267600] env[63538]: _type = "Task" [ 1143.267600] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.276012] env[63538]: DEBUG oslo_vmware.api [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101763, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.385385] env[63538]: DEBUG nova.objects.instance [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lazy-loading 'flavor' on Instance uuid 048573b4-26db-4a62-81e0-1bc1c3999d02 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.405823] env[63538]: INFO nova.compute.manager [-] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Took 1.37 seconds to deallocate network for instance. [ 1143.442219] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.442991] env[63538]: DEBUG nova.compute.manager [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1143.723535] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.753324] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.753646] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.779353] env[63538]: DEBUG oslo_vmware.api [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101763, 'name': PowerOffVM_Task, 'duration_secs': 0.231464} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.779660] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1143.779814] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1143.780098] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4bacae3c-24ef-4a3e-8b67-986a78c868c7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.858129] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1143.858374] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 
1143.858562] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleting the datastore file [datastore2] 3d80dc17-e330-4575-8e12-e06d8e76274a {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1143.858841] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a239b664-0878-426a-89e8-badf3e74503f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.871092] env[63538]: DEBUG oslo_vmware.api [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1143.871092] env[63538]: value = "task-5101765" [ 1143.871092] env[63538]: _type = "Task" [ 1143.871092] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.880084] env[63538]: DEBUG oslo_vmware.api [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101765, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.893055] env[63538]: DEBUG oslo_concurrency.lockutils [None req-471f3c1b-1a42-4484-a8d5-70cd64969423 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.321s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.913177] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.913477] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.913704] env[63538]: DEBUG nova.objects.instance [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Lazy-loading 'resources' on Instance uuid 4e07fbfb-cae0-440d-8f75-c76cce3f7d00 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.948808] env[63538]: DEBUG nova.compute.utils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1143.950477] env[63538]: DEBUG nova.compute.manager [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 
tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1143.950714] env[63538]: DEBUG nova.network.neutron [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1144.008399] env[63538]: DEBUG nova.policy [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee31bcde56b84cab97433478b88a639d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e00012e265f2423ab6e95706469cf1b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1144.225926] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.256442] env[63538]: DEBUG nova.compute.manager [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1144.354210] env[63538]: DEBUG nova.network.neutron [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Successfully created port: 4befc99f-ca57-4a1f-9199-7001666fbd6b {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1144.380957] env[63538]: DEBUG oslo_vmware.api [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101765, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188758} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.381240] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1144.381431] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1144.381617] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1144.381799] env[63538]: INFO nova.compute.manager [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1144.382060] env[63538]: DEBUG oslo.service.loopingcall [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1144.382266] env[63538]: DEBUG nova.compute.manager [-] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1144.382363] env[63538]: DEBUG nova.network.neutron [-] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1144.392825] env[63538]: INFO nova.compute.manager [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Rebuilding instance [ 1144.452479] env[63538]: DEBUG nova.compute.manager [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1144.453374] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb53d4ec-23ac-4d8a-8a5e-a04648b12d7d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.459188] env[63538]: DEBUG nova.compute.manager [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1144.600389] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363676ae-cbf0-47c2-9087-76339b41534e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.609224] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1308b9-c4c8-4399-bb22-76a35594c591 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.644942] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aac4de3-8250-45e3-be0b-030670ba59d6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.654500] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010b339d-fc8c-4ab9-ae9e-8427db253385 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.676791] env[63538]: DEBUG nova.compute.provider_tree [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.728162] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.775792] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.979853] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1144.980212] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1f52041-d984-49f1-9fec-96c5fea56ba9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.989936] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1144.989936] env[63538]: value = "task-5101766" [ 1144.989936] env[63538]: _type = "Task" [ 1144.989936] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.001038] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101766, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.180905] env[63538]: DEBUG nova.scheduler.client.report [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1145.228480] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.253556] env[63538]: DEBUG nova.compute.manager [req-deb84a22-98d5-4b5e-8e09-39e1d9b6ebb7 req-5a0fa507-effd-4667-9443-118c191d6ce1 service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Received event network-vif-deleted-2bf5c751-02ce-4e9e-8e98-68c3505b8aec {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1145.253556] env[63538]: INFO nova.compute.manager [req-deb84a22-98d5-4b5e-8e09-39e1d9b6ebb7 req-5a0fa507-effd-4667-9443-118c191d6ce1 service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Neutron deleted interface 2bf5c751-02ce-4e9e-8e98-68c3505b8aec; detaching it from the instance and deleting it from the info cache [ 1145.253556] env[63538]: DEBUG nova.network.neutron [req-deb84a22-98d5-4b5e-8e09-39e1d9b6ebb7 req-5a0fa507-effd-4667-9443-118c191d6ce1 service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.469301] env[63538]: DEBUG nova.compute.manager [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1145.503818] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101766, 'name': PowerOffVM_Task, 'duration_secs': 0.395577} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.506267] env[63538]: DEBUG nova.virt.hardware [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1145.506514] env[63538]: DEBUG nova.virt.hardware [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1145.506711] env[63538]: DEBUG nova.virt.hardware [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1145.507013] env[63538]: DEBUG nova.virt.hardware [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1145.507132] env[63538]: DEBUG nova.virt.hardware [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1145.507352] env[63538]: DEBUG nova.virt.hardware [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1145.507438] env[63538]: DEBUG nova.virt.hardware [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1145.507628] env[63538]: DEBUG nova.virt.hardware [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1145.508073] env[63538]: DEBUG nova.virt.hardware [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1145.508147] env[63538]: DEBUG nova.virt.hardware [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1145.508306] env[63538]: DEBUG nova.virt.hardware [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1145.508607] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1145.510790] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ed9bd9-53d9-47c7-9bc0-970c1d26b637 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.522219] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8f849c-053e-4fdd-95fb-a1786e363021 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.576516] env[63538]: INFO nova.compute.manager [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Detaching volume c24d91c9-a15c-486f-a3b8-f0c4b143cda1 [ 1145.610599] env[63538]: INFO nova.virt.block_device [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Attempting to driver detach volume c24d91c9-a15c-486f-a3b8-f0c4b143cda1 from mountpoint /dev/sdb [ 1145.610861] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Volume detach. 
Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1145.611060] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992526', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'name': 'volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '048573b4-26db-4a62-81e0-1bc1c3999d02', 'attached_at': '', 'detached_at': '', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'serial': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1145.611968] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7d87de-baab-44d2-b129-58526f02df06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.634199] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c485eb-fc92-48b8-8c2d-1e4bf864d152 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.642283] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2bc6d6-7512-4015-b4e4-c705e1483626 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.668084] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb3750e-414a-44bb-bebb-2a2e016dce0c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.687708] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.774s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.690415] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] The volume has not been displaced from its original location: [datastore1] volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1/volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1.vmdk. No consolidation needed. 
{{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1145.697082] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1145.697890] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.922s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.699772] env[63538]: INFO nova.compute.claims [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1145.703713] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97b2be36-ad5d-458b-8f2e-df58f2272c3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.722576] env[63538]: INFO nova.scheduler.client.report [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Deleted allocations for instance 4e07fbfb-cae0-440d-8f75-c76cce3f7d00 [ 1145.733245] env[63538]: DEBUG nova.network.neutron [-] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.734958] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.735395] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1145.735395] env[63538]: value = "task-5101767" [ 1145.735395] env[63538]: _type = "Task" [ 1145.735395] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.746721] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101767, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.759527] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1c23344a-9a06-43d0-a6b4-93349f4a5c9b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.770979] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470d1557-3fe5-41e3-8ccd-54ee76ae1b94 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.804479] env[63538]: DEBUG nova.compute.manager [req-deb84a22-98d5-4b5e-8e09-39e1d9b6ebb7 req-5a0fa507-effd-4667-9443-118c191d6ce1 service nova] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Detach interface failed, port_id=2bf5c751-02ce-4e9e-8e98-68c3505b8aec, reason: Instance 3d80dc17-e330-4575-8e12-e06d8e76274a could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1146.230944] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101756, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.239137] env[63538]: INFO nova.compute.manager [-] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Took 1.86 seconds to deallocate network for instance. [ 1146.248413] env[63538]: DEBUG oslo_concurrency.lockutils [None req-0aaa5069-64bd-4362-8399-cd748ffd500e tempest-ServerMetadataTestJSON-466342864 tempest-ServerMetadataTestJSON-466342864-project-member] Lock "4e07fbfb-cae0-440d-8f75-c76cce3f7d00" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.363s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.257409] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101767, 'name': ReconfigVM_Task, 'duration_secs': 0.233596} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.257974] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1146.263491] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f59cc2bc-14f4-475c-8ab4-7272fe5ef214 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.283764] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1146.283764] env[63538]: value = "task-5101768" [ 1146.283764] env[63538]: _type = "Task" [ 1146.283764] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.297168] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101768, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.298402] env[63538]: DEBUG nova.network.neutron [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Successfully updated port: 4befc99f-ca57-4a1f-9199-7001666fbd6b {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1146.732441] env[63538]: DEBUG oslo_vmware.api [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101756, 'name': ReconfigVM_Task, 'duration_secs': 5.783804} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.732805] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.732919] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Reconfigured VM to detach interface {{(pid=63538) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 1146.752256] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.795290] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101768, 'name': ReconfigVM_Task, 'duration_secs': 0.265704} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.798228] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992526', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'name': 'volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '048573b4-26db-4a62-81e0-1bc1c3999d02', 'attached_at': '', 'detached_at': '', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'serial': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1146.800807] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Acquiring lock "refresh_cache-0599fa68-1109-4edf-b42e-f81e7f09d641" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.800946] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Acquired lock "refresh_cache-0599fa68-1109-4edf-b42e-f81e7f09d641" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.801226] env[63538]: DEBUG nova.network.neutron [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1146.885119] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9718aa3-2ec7-44bd-8918-8a32c55c423d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.894766] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3cb0ce-1936-488f-8a37-d8ea727a955a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.927963] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6472eb-4fb4-4a17-9605-576d62acf703 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.936833] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83434e90-c3ff-4201-84eb-e1e91aa3f179 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.951830] env[63538]: DEBUG nova.compute.provider_tree [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1147.293540] env[63538]: DEBUG nova.compute.manager [req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Received event network-vif-plugged-4befc99f-ca57-4a1f-9199-7001666fbd6b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1147.293795] env[63538]: DEBUG oslo_concurrency.lockutils [req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] Acquiring lock "0599fa68-1109-4edf-b42e-f81e7f09d641-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.294031] env[63538]: DEBUG oslo_concurrency.lockutils [req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] Lock "0599fa68-1109-4edf-b42e-f81e7f09d641-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.294218] env[63538]: DEBUG oslo_concurrency.lockutils [req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] Lock "0599fa68-1109-4edf-b42e-f81e7f09d641-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.294397] env[63538]: DEBUG nova.compute.manager [req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] No waiting events found dispatching network-vif-plugged-4befc99f-ca57-4a1f-9199-7001666fbd6b {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1147.294571] env[63538]: WARNING nova.compute.manager [req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Received unexpected event network-vif-plugged-4befc99f-ca57-4a1f-9199-7001666fbd6b for instance with vm_state building and task_state spawning. [ 1147.295070] env[63538]: DEBUG nova.compute.manager [req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Received event network-changed-4befc99f-ca57-4a1f-9199-7001666fbd6b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1147.295070] env[63538]: DEBUG nova.compute.manager [req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Refreshing instance network info cache due to event network-changed-4befc99f-ca57-4a1f-9199-7001666fbd6b. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1147.297748] env[63538]: DEBUG oslo_concurrency.lockutils [req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] Acquiring lock "refresh_cache-0599fa68-1109-4edf-b42e-f81e7f09d641" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1147.365062] env[63538]: DEBUG nova.network.neutron [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1147.456225] env[63538]: DEBUG nova.scheduler.client.report [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1147.614562] env[63538]: DEBUG nova.network.neutron [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Updating instance_info_cache with network_info: [{"id": "4befc99f-ca57-4a1f-9199-7001666fbd6b", "address": "fa:16:3e:22:00:1e", "network": {"id": "17f839f4-0ed6-4480-8023-fea357885d6f", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1547066286-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e00012e265f2423ab6e95706469cf1b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4befc99f-ca", "ovs_interfaceid": "4befc99f-ca57-4a1f-9199-7001666fbd6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.869318] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1147.870831] env[63538]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af2833b0-665d-411e-bd52-75b983024d9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.886971] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1147.886971] env[63538]: value = "task-5101769" [ 1147.886971] env[63538]: _type = "Task" [ 1147.886971] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.901519] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1147.901519] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Volume detach. Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1147.901519] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992526', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'name': 'volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '048573b4-26db-4a62-81e0-1bc1c3999d02', 'attached_at': '', 'detached_at': '', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'serial': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1147.901519] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b235911-7829-4c8d-9366-4ccef5f45aaa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.922906] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f43df88-ac8f-4af0-b670-bac9135cd9d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.932220] env[63538]: WARNING nova.virt.vmwareapi.driver [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1147.932534] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 
1147.933338] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a986e0e4-9a99-43ec-8592-b19e508dc426 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.940818] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1147.941091] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11993506-7c57-4b3b-b98e-034b81e62ee6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.964794] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.965792] env[63538]: DEBUG nova.compute.manager [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1147.968249] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.216s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.968477] env[63538]: DEBUG nova.objects.instance [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'resources' on Instance uuid 3d80dc17-e330-4575-8e12-e06d8e76274a {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1148.006792] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1148.006792] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1148.006792] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleting the datastore file [datastore2] 048573b4-26db-4a62-81e0-1bc1c3999d02 
{{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1148.007107] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a4cb6d1-32ee-4300-9241-d9feba9575f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.017277] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1148.017277] env[63538]: value = "task-5101771" [ 1148.017277] env[63538]: _type = "Task" [ 1148.017277] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.025983] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101771, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.116927] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Releasing lock "refresh_cache-0599fa68-1109-4edf-b42e-f81e7f09d641" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1148.117496] env[63538]: DEBUG nova.compute.manager [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Instance network_info: |[{"id": "4befc99f-ca57-4a1f-9199-7001666fbd6b", "address": "fa:16:3e:22:00:1e", "network": {"id": "17f839f4-0ed6-4480-8023-fea357885d6f", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1547066286-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e00012e265f2423ab6e95706469cf1b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4befc99f-ca", "ovs_interfaceid": "4befc99f-ca57-4a1f-9199-7001666fbd6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1148.118422] env[63538]: DEBUG oslo_concurrency.lockutils [req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] Acquired lock "refresh_cache-0599fa68-1109-4edf-b42e-f81e7f09d641" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.118677] env[63538]: DEBUG nova.network.neutron 
[req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Refreshing network info cache for port 4befc99f-ca57-4a1f-9199-7001666fbd6b {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1148.120431] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:00:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f49a7d-c6e5-404f-b71a-91d8c070cd18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4befc99f-ca57-4a1f-9199-7001666fbd6b', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1148.132522] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Creating folder: Project (e00012e265f2423ab6e95706469cf1b1). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1148.134284] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7aae01a5-cdbe-4257-a55d-cd3b2081fdd9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.148847] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Created folder: Project (e00012e265f2423ab6e95706469cf1b1) in parent group-v992234. [ 1148.149368] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Creating folder: Instances. Parent ref: group-v992527. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1148.149533] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a291775-b627-4f9e-af6f-88389231fff6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.161569] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Created folder: Instances in parent group-v992527. [ 1148.161959] env[63538]: DEBUG oslo.service.loopingcall [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1148.162348] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1148.162454] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3064e047-a857-42f5-af12-2316b1b3c7c3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.185016] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1148.185016] env[63538]: value = "task-5101774" [ 1148.185016] env[63538]: _type = "Task" [ 1148.185016] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.193839] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101774, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.378871] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1148.379206] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.379423] env[63538]: DEBUG nova.network.neutron [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1148.473142] env[63538]: DEBUG nova.compute.utils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1148.479636] env[63538]: DEBUG nova.compute.manager [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1148.479974] env[63538]: DEBUG nova.network.neutron [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1148.528406] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101771, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183936} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.529953] env[63538]: DEBUG nova.policy [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb444448a4d64c5e8ec9613ed633a527', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9b1eba931f144b94b6e186dac1310dfa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1148.531514] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1148.531719] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1148.531906] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1148.651329] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ef130d-6737-461c-a81b-c9e7e27c43dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.659379] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6c0735-32d1-4801-aa3e-c4ebf06c4f38 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.695897] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c6f9c6-9993-4e77-bdb9-f667800d78c5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.706993] env[63538]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4598c854-2c7f-428d-af0f-3f5c213c8f91 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.710990] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101774, 'name': CreateVM_Task, 'duration_secs': 0.345381} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.711217] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1148.712324] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1148.712508] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.712851] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1148.713137] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffc6de4c-3ae3-4237-9b43-8245b9ed7176 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.725007] env[63538]: DEBUG nova.compute.provider_tree [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1148.730031] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Waiting for the task: (returnval){ [ 1148.730031] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526f7269-86bb-d797-5966-371f5e36581a" [ 1148.730031] env[63538]: _type = "Task" [ 1148.730031] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.739136] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526f7269-86bb-d797-5966-371f5e36581a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.938110] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "interface-4387a3ec-0f0b-4917-97f3-08c737bee4e7-880907a0-da53-40af-a1ad-126b284f384c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.938550] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-4387a3ec-0f0b-4917-97f3-08c737bee4e7-880907a0-da53-40af-a1ad-126b284f384c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1148.938814] env[63538]: DEBUG nova.objects.instance [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'flavor' on Instance uuid 4387a3ec-0f0b-4917-97f3-08c737bee4e7 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1148.978361] env[63538]: DEBUG nova.compute.manager [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1149.042582] env[63538]: INFO nova.virt.block_device [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Booting with volume c24d91c9-a15c-486f-a3b8-f0c4b143cda1 at /dev/sdb [ 1149.081335] env[63538]: DEBUG nova.network.neutron [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Successfully created port: c42aed5e-d684-4b97-aade-4acca4902f3d {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1149.089252] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-184dbd20-8967-4904-b6ad-c2557ba43eda {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.094222] env[63538]: DEBUG nova.network.neutron [req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Updated VIF entry in instance network info cache for port 4befc99f-ca57-4a1f-9199-7001666fbd6b. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1149.094673] env[63538]: DEBUG nova.network.neutron [req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Updating instance_info_cache with network_info: [{"id": "4befc99f-ca57-4a1f-9199-7001666fbd6b", "address": "fa:16:3e:22:00:1e", "network": {"id": "17f839f4-0ed6-4480-8023-fea357885d6f", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1547066286-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e00012e265f2423ab6e95706469cf1b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4befc99f-ca", "ovs_interfaceid": "4befc99f-ca57-4a1f-9199-7001666fbd6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.106024] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7379a0-89b3-43d0-840c-f255dd2c2a59 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.141714] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-859194fd-087c-40c2-87d9-1bd2b373437a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.151149] env[63538]: INFO nova.network.neutron [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Port 880907a0-da53-40af-a1ad-126b284f384c from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1149.151149] env[63538]: DEBUG nova.network.neutron [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updating instance_info_cache with network_info: [{"id": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "address": "fa:16:3e:f0:24:40", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfaa4640-ae", "ovs_interfaceid": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.154645] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a6caaf-3012-4e4c-9901-f435e08d6f88 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.187591] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c588f939-3f25-4262-8023-90a9d53759e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.195206] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e040ec-a393-4850-83fb-37ecfde14d8d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.209720] env[63538]: DEBUG nova.virt.block_device [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Updating existing volume attachment record: 6177d447-53f0-449b-af81-f575f19a0494 {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1149.231404] env[63538]: DEBUG nova.scheduler.client.report [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1149.242185] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526f7269-86bb-d797-5966-371f5e36581a, 'name': SearchDatastore_Task, 'duration_secs': 0.011994} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.242728] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.243087] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1149.243496] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.243772] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.244085] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1149.245051] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-444d112e-3385-4275-82c6-9b2b869418f5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.255451] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1149.255892] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1149.256793] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1252964b-d06a-4268-8ea3-3cc71365cb33 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.265019] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Waiting for the task: (returnval){ [ 1149.265019] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f2cb28-41e8-2c86-02e2-d834b5d7418b" [ 1149.265019] env[63538]: _type = "Task" [ 1149.265019] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.273704] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f2cb28-41e8-2c86-02e2-d834b5d7418b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.334836] env[63538]: DEBUG nova.compute.manager [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Received event network-changed-dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1149.334967] env[63538]: DEBUG nova.compute.manager [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Refreshing instance network info cache due to event network-changed-dfaa4640-ae2a-444b-aa92-e24dd9eca692. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1149.335141] env[63538]: DEBUG oslo_concurrency.lockutils [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] Acquiring lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.540878] env[63538]: DEBUG nova.objects.instance [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'pci_requests' on Instance uuid 4387a3ec-0f0b-4917-97f3-08c737bee4e7 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1149.597817] env[63538]: DEBUG oslo_concurrency.lockutils [req-2a9f7f96-84ed-4ecd-a85a-aca7c210d8c1 req-9cd5ae5b-a7af-4b25-8104-dd8a9f939ff9 service nova] Releasing lock "refresh_cache-0599fa68-1109-4edf-b42e-f81e7f09d641" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.654138] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.656964] env[63538]: DEBUG oslo_concurrency.lockutils [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] Acquired lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.656964] env[63538]: DEBUG nova.network.neutron [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Refreshing network info cache for port dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1149.732964] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.765s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.758530] env[63538]: INFO nova.scheduler.client.report [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleted allocations for instance 3d80dc17-e330-4575-8e12-e06d8e76274a [ 1149.775056] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f2cb28-41e8-2c86-02e2-d834b5d7418b, 'name': SearchDatastore_Task, 'duration_secs': 0.010053} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.775631] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbfa1953-ac8b-45f4-8cb6-28bdec1ad43b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.782628] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Waiting for the task: (returnval){ [ 1149.782628] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bef48b-6d34-0807-cd9b-8d3142cdba48" [ 1149.782628] env[63538]: _type = "Task" [ 1149.782628] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.791213] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bef48b-6d34-0807-cd9b-8d3142cdba48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.989033] env[63538]: DEBUG nova.compute.manager [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1150.018378] env[63538]: DEBUG nova.virt.hardware [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1150.018634] env[63538]: DEBUG nova.virt.hardware [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1150.018888] env[63538]: DEBUG nova.virt.hardware [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1150.019139] env[63538]: DEBUG nova.virt.hardware [None 
req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1150.019300] env[63538]: DEBUG nova.virt.hardware [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1150.019454] env[63538]: DEBUG nova.virt.hardware [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1150.019672] env[63538]: DEBUG nova.virt.hardware [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1150.019835] env[63538]: DEBUG nova.virt.hardware [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1150.020009] env[63538]: DEBUG nova.virt.hardware [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1150.020197] env[63538]: DEBUG nova.virt.hardware [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1150.020373] env[63538]: DEBUG nova.virt.hardware [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1150.021308] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b76b442-1d00-4edf-a3dd-a04a28986fa3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.030650] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6956dee-03cc-477d-b151-f49d9c6f0a4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.046654] env[63538]: DEBUG nova.objects.base [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Object Instance<4387a3ec-0f0b-4917-97f3-08c737bee4e7> lazy-loaded attributes: 
flavor,pci_requests {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1150.046863] env[63538]: DEBUG nova.network.neutron [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1150.149305] env[63538]: DEBUG nova.policy [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ebe77c0e32ce4a32b290ba5088e107f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7063c42297c24f2baf7271fa25dec927', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1150.160912] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8bc217b1-a062-4b55-aa2c-06c33affc2fd tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-42af31f3-a9d0-4fdd-99fa-442ebe915277-880907a0-da53-40af-a1ad-126b284f384c" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.065s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.268960] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dd775a53-9d6f-4bee-9c05-0e0180548c8f tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "3d80dc17-e330-4575-8e12-e06d8e76274a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.024s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.294736] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bef48b-6d34-0807-cd9b-8d3142cdba48, 'name': SearchDatastore_Task, 'duration_secs': 0.010279} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.295049] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1150.295322] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 0599fa68-1109-4edf-b42e-f81e7f09d641/0599fa68-1109-4edf-b42e-f81e7f09d641.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1150.295589] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0353736-9765-4387-a303-aaaca3badfe0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.303038] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Waiting for the task: (returnval){ [ 1150.303038] env[63538]: value = "task-5101775" [ 1150.303038] env[63538]: _type = "Task" [ 1150.303038] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.315657] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101775, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.381730] env[63538]: DEBUG nova.network.neutron [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updated VIF entry in instance network info cache for port dfaa4640-ae2a-444b-aa92-e24dd9eca692. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1150.382141] env[63538]: DEBUG nova.network.neutron [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updating instance_info_cache with network_info: [{"id": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "address": "fa:16:3e:f0:24:40", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfaa4640-ae", "ovs_interfaceid": "dfaa4640-ae2a-444b-aa92-e24dd9eca692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.817276] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101775, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.838174] env[63538]: DEBUG nova.network.neutron [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Successfully updated port: c42aed5e-d684-4b97-aade-4acca4902f3d {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1150.885335] env[63538]: DEBUG oslo_concurrency.lockutils [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] Releasing lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1150.885673] env[63538]: DEBUG nova.compute.manager [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Received event network-changed-f31eeedc-851d-457f-8464-c8562fdeaf87 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1150.885873] env[63538]: DEBUG nova.compute.manager [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Refreshing instance network info cache due to event network-changed-f31eeedc-851d-457f-8464-c8562fdeaf87. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1150.886210] env[63538]: DEBUG oslo_concurrency.lockutils [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] Acquiring lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1150.886827] env[63538]: DEBUG oslo_concurrency.lockutils [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] Acquired lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.886827] env[63538]: DEBUG nova.network.neutron [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Refreshing network info cache for port f31eeedc-851d-457f-8464-c8562fdeaf87 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1151.318882] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101775, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.341042] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "refresh_cache-e3feec17-ca1b-4873-bb0a-370c3868aabf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.341344] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired lock "refresh_cache-e3feec17-ca1b-4873-bb0a-370c3868aabf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.341455] env[63538]: DEBUG nova.network.neutron [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1151.360767] env[63538]: DEBUG nova.virt.hardware [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1151.361174] env[63538]: DEBUG nova.virt.hardware [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1151.361278] env[63538]: DEBUG nova.virt.hardware [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1151.361423] env[63538]: DEBUG nova.virt.hardware [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1151.361690] env[63538]: DEBUG nova.virt.hardware [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1151.361796] env[63538]: DEBUG nova.virt.hardware [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1151.362126] env[63538]: DEBUG nova.virt.hardware [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1151.362224] env[63538]: DEBUG nova.virt.hardware [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1151.362550] env[63538]: DEBUG nova.virt.hardware [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1151.362863] env[63538]: DEBUG nova.virt.hardware [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 
1151.363149] env[63538]: DEBUG nova.virt.hardware [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1151.364358] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc365bf9-9130-4e48-85e1-fc450c86d900 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.374943] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321c9686-e466-4f05-bed2-ac9fac5cf179 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.392929] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:54:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3de39e87-f579-458e-a713-326821c5daa5', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1151.400771] env[63538]: DEBUG oslo.service.loopingcall [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1151.401518] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1151.402112] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6554df50-80fe-4113-ae92-f6b6e2ea7faa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.424587] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1151.424587] env[63538]: value = "task-5101776" [ 1151.424587] env[63538]: _type = "Task" [ 1151.424587] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.434319] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101776, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.574620] env[63538]: DEBUG nova.compute.manager [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Received event network-vif-plugged-c42aed5e-d684-4b97-aade-4acca4902f3d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1151.574904] env[63538]: DEBUG oslo_concurrency.lockutils [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] Acquiring lock "e3feec17-ca1b-4873-bb0a-370c3868aabf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.575106] env[63538]: DEBUG oslo_concurrency.lockutils [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.575286] env[63538]: DEBUG oslo_concurrency.lockutils [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.575462] env[63538]: DEBUG nova.compute.manager [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] No waiting events found dispatching network-vif-plugged-c42aed5e-d684-4b97-aade-4acca4902f3d {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1151.575633] env[63538]: WARNING nova.compute.manager [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Received unexpected event network-vif-plugged-c42aed5e-d684-4b97-aade-4acca4902f3d for instance with vm_state building and task_state spawning. [ 1151.575797] env[63538]: DEBUG nova.compute.manager [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Received event network-changed-c42aed5e-d684-4b97-aade-4acca4902f3d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1151.575952] env[63538]: DEBUG nova.compute.manager [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Refreshing instance network info cache due to event network-changed-c42aed5e-d684-4b97-aade-4acca4902f3d. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1151.576125] env[63538]: DEBUG oslo_concurrency.lockutils [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] Acquiring lock "refresh_cache-e3feec17-ca1b-4873-bb0a-370c3868aabf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.606234] env[63538]: DEBUG nova.compute.manager [req-1b74f927-5134-4512-8226-f501542876f2 req-4d284964-5153-4a37-a59f-49905a689ba1 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Received event network-vif-plugged-880907a0-da53-40af-a1ad-126b284f384c {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1151.606564] env[63538]: DEBUG oslo_concurrency.lockutils [req-1b74f927-5134-4512-8226-f501542876f2 req-4d284964-5153-4a37-a59f-49905a689ba1 service nova] Acquiring lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.606816] env[63538]: DEBUG oslo_concurrency.lockutils [req-1b74f927-5134-4512-8226-f501542876f2 req-4d284964-5153-4a37-a59f-49905a689ba1 service nova] Lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.606979] env[63538]: DEBUG oslo_concurrency.lockutils [req-1b74f927-5134-4512-8226-f501542876f2 req-4d284964-5153-4a37-a59f-49905a689ba1 service nova] Lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.607140] env[63538]: DEBUG nova.compute.manager [req-1b74f927-5134-4512-8226-f501542876f2 req-4d284964-5153-4a37-a59f-49905a689ba1 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] No waiting events found dispatching network-vif-plugged-880907a0-da53-40af-a1ad-126b284f384c {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1151.607311] env[63538]: WARNING nova.compute.manager [req-1b74f927-5134-4512-8226-f501542876f2 req-4d284964-5153-4a37-a59f-49905a689ba1 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Received unexpected event network-vif-plugged-880907a0-da53-40af-a1ad-126b284f384c for instance with vm_state active and task_state None. [ 1151.690231] env[63538]: DEBUG nova.network.neutron [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updated VIF entry in instance network info cache for port f31eeedc-851d-457f-8464-c8562fdeaf87. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1151.690805] env[63538]: DEBUG nova.network.neutron [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updating instance_info_cache with network_info: [{"id": "f31eeedc-851d-457f-8464-c8562fdeaf87", "address": "fa:16:3e:8d:d6:64", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf31eeedc-85", "ovs_interfaceid": "f31eeedc-851d-457f-8464-c8562fdeaf87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.715831] env[63538]: DEBUG nova.network.neutron [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Successfully updated port: 880907a0-da53-40af-a1ad-126b284f384c {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1151.819591] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101775, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.504587} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.819928] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 0599fa68-1109-4edf-b42e-f81e7f09d641/0599fa68-1109-4edf-b42e-f81e7f09d641.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1151.820247] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1151.820543] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e9dcd09-291c-4a59-9b53-de23f1ca0cd3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.827641] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Waiting for the task: (returnval){ [ 1151.827641] env[63538]: value = "task-5101777" [ 1151.827641] env[63538]: _type = "Task" [ 1151.827641] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.837761] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101777, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.873584] env[63538]: DEBUG nova.network.neutron [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1151.936254] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101776, 'name': CreateVM_Task, 'duration_secs': 0.43298} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.936431] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1151.937197] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.937421] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.937779] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1151.938060] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d53384e-759d-43fe-8d41-2cef3a2116e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.943984] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1151.943984] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5203fde0-5974-d448-e091-1c831257ed5f" [ 1151.943984] env[63538]: _type = "Task" [ 1151.943984] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.953416] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5203fde0-5974-d448-e091-1c831257ed5f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.991222] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "f0183c1f-4557-45fd-ba65-4821ef661173" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.991483] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "f0183c1f-4557-45fd-ba65-4821ef661173" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.044718] env[63538]: DEBUG nova.network.neutron [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Updating instance_info_cache with network_info: [{"id": "c42aed5e-d684-4b97-aade-4acca4902f3d", "address": "fa:16:3e:ea:d4:32", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc42aed5e-d6", "ovs_interfaceid": "c42aed5e-d684-4b97-aade-4acca4902f3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.193854] env[63538]: DEBUG oslo_concurrency.lockutils [req-e6fd9b25-a0ed-4335-9009-07d20f499ecc req-c6e2563e-8ae3-4e08-b8fd-f99280f7ba0d service nova] Releasing lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.222082] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.222082] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 
tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.222265] env[63538]: DEBUG nova.network.neutron [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1152.338157] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101777, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070466} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.338532] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1152.339295] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feff75a1-a211-4cfe-8c1a-b46a31861ce2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.361750] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 0599fa68-1109-4edf-b42e-f81e7f09d641/0599fa68-1109-4edf-b42e-f81e7f09d641.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1152.362139] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06e56951-137d-4e83-b3b9-ad020f8159c9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.383601] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Waiting for the task: (returnval){ [ 1152.383601] env[63538]: value = "task-5101778" [ 1152.383601] env[63538]: _type = "Task" [ 1152.383601] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.392772] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101778, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.456081] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5203fde0-5974-d448-e091-1c831257ed5f, 'name': SearchDatastore_Task, 'duration_secs': 0.012249} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.456372] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.456609] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1152.456939] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.457116] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.457314] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1152.457591] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f07ee88-91a5-4ac4-9883-ac12fb5fff69 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.467246] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1152.467456] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1152.468287] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92658db4-cd49-4b8a-bf31-c82b0616413d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.474297] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1152.474297] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52781416-9af8-e44e-aece-7e3dc6456321" [ 1152.474297] env[63538]: _type = "Task" [ 1152.474297] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.483156] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52781416-9af8-e44e-aece-7e3dc6456321, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.493768] env[63538]: DEBUG nova.compute.manager [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1152.549616] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Releasing lock "refresh_cache-e3feec17-ca1b-4873-bb0a-370c3868aabf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.549941] env[63538]: DEBUG nova.compute.manager [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Instance network_info: |[{"id": "c42aed5e-d684-4b97-aade-4acca4902f3d", "address": "fa:16:3e:ea:d4:32", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc42aed5e-d6", "ovs_interfaceid": "c42aed5e-d684-4b97-aade-4acca4902f3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1152.551020] env[63538]: DEBUG oslo_concurrency.lockutils [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] Acquired lock "refresh_cache-e3feec17-ca1b-4873-bb0a-370c3868aabf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.551243] env[63538]: DEBUG nova.network.neutron [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Refreshing network info cache for port c42aed5e-d684-4b97-aade-4acca4902f3d {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1152.553648] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:d4:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c42aed5e-d684-4b97-aade-4acca4902f3d', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1152.563453] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Creating folder: Project (9b1eba931f144b94b6e186dac1310dfa). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1152.564829] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e3a36b6-882a-4944-94a6-77ed801a0b51 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.577247] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Created folder: Project (9b1eba931f144b94b6e186dac1310dfa) in parent group-v992234. [ 1152.577459] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Creating folder: Instances. Parent ref: group-v992531. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1152.577710] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-acb1b0a0-df9f-47f6-bd67-57d4eda62080 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.587449] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Created folder: Instances in parent group-v992531. [ 1152.587782] env[63538]: DEBUG oslo.service.loopingcall [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1152.587977] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1152.588211] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b64b25b-5f0b-479e-ad3a-10d4cb5890ba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.608072] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1152.608072] env[63538]: value = "task-5101781" [ 1152.608072] env[63538]: _type = "Task" [ 1152.608072] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.617126] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101781, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.760975] env[63538]: WARNING nova.network.neutron [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] 44a04a43-a979-4648-89b2-63323df5b0f3 already exists in list: networks containing: ['44a04a43-a979-4648-89b2-63323df5b0f3']. ignoring it [ 1152.900514] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101778, 'name': ReconfigVM_Task, 'duration_secs': 0.336276} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.900514] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 0599fa68-1109-4edf-b42e-f81e7f09d641/0599fa68-1109-4edf-b42e-f81e7f09d641.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.900514] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-50dce5da-8ac8-45ea-bb56-88ff58d53211 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.908449] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Waiting for the task: (returnval){ [ 1152.908449] env[63538]: value = "task-5101782" [ 1152.908449] env[63538]: _type = "Task" [ 1152.908449] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.919116] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101782, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.987542] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52781416-9af8-e44e-aece-7e3dc6456321, 'name': SearchDatastore_Task, 'duration_secs': 0.009748} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.990832] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-194dfdf4-a4ad-4de0-89b5-f7c5fe9e4f92 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.996903] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1152.996903] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52502d62-47f6-20fc-8edc-04c929c2a26c" [ 1152.996903] env[63538]: _type = "Task" [ 1152.996903] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.011854] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52502d62-47f6-20fc-8edc-04c929c2a26c, 'name': SearchDatastore_Task, 'duration_secs': 0.01152} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.012522] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.012522] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 048573b4-26db-4a62-81e0-1bc1c3999d02/048573b4-26db-4a62-81e0-1bc1c3999d02.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1153.012886] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a640a95-0122-4f8a-9c36-7f5d9405c1e7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.020056] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.020327] 
env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.022078] env[63538]: INFO nova.compute.claims [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1153.029171] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1153.029171] env[63538]: value = "task-5101783" [ 1153.029171] env[63538]: _type = "Task" [ 1153.029171] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.038091] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101783, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.107571] env[63538]: DEBUG nova.network.neutron [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updating instance_info_cache with network_info: [{"id": "f31eeedc-851d-457f-8464-c8562fdeaf87", "address": "fa:16:3e:8d:d6:64", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf31eeedc-85", "ovs_interfaceid": "f31eeedc-851d-457f-8464-c8562fdeaf87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "880907a0-da53-40af-a1ad-126b284f384c", "address": "fa:16:3e:f4:b1:8b", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap880907a0-da", "ovs_interfaceid": "880907a0-da53-40af-a1ad-126b284f384c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.120295] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101781, 'name': CreateVM_Task, 'duration_secs': 0.364836} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.120468] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1153.121217] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.121425] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.121754] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1153.122619] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00d1ddee-e20d-461b-a0ca-a78c7815fc06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.128772] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1153.128772] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5224fb1e-d024-01ba-f366-031c169e1074" [ 1153.128772] env[63538]: _type = "Task" [ 1153.128772] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.136609] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5224fb1e-d024-01ba-f366-031c169e1074, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.297876] env[63538]: DEBUG nova.network.neutron [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Updated VIF entry in instance network info cache for port c42aed5e-d684-4b97-aade-4acca4902f3d. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1153.298320] env[63538]: DEBUG nova.network.neutron [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Updating instance_info_cache with network_info: [{"id": "c42aed5e-d684-4b97-aade-4acca4902f3d", "address": "fa:16:3e:ea:d4:32", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc42aed5e-d6", "ovs_interfaceid": "c42aed5e-d684-4b97-aade-4acca4902f3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.420717] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101782, 'name': Rename_Task, 'duration_secs': 0.170238} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.421168] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1153.421444] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f26e323-a6b3-478a-b639-7f0dd093cf76 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.429739] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Waiting for the task: (returnval){ [ 1153.429739] env[63538]: value = "task-5101784" [ 1153.429739] env[63538]: _type = "Task" [ 1153.429739] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.439143] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101784, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.541811] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101783, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.615398] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.616183] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.616351] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.617291] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28c1133-137d-4800-a958-1587a41125ed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.635525] env[63538]: DEBUG nova.virt.hardware [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1153.635783] env[63538]: DEBUG nova.virt.hardware [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1153.635952] env[63538]: DEBUG nova.virt.hardware [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1153.636158] env[63538]: DEBUG nova.virt.hardware [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1153.636312] env[63538]: DEBUG nova.virt.hardware [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Image pref 0:0:0 {{(pid=63538) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1153.636470] env[63538]: DEBUG nova.virt.hardware [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1153.636696] env[63538]: DEBUG nova.virt.hardware [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1153.636866] env[63538]: DEBUG nova.virt.hardware [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1153.637049] env[63538]: DEBUG nova.virt.hardware [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1153.637224] env[63538]: DEBUG nova.virt.hardware [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1153.637405] env[63538]: DEBUG nova.virt.hardware [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1153.643787] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Reconfiguring VM to attach interface {{(pid=63538) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 1153.648494] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-797b78e3-a32b-4b8b-b7eb-ea9d376ecec6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.662279] env[63538]: DEBUG nova.compute.manager [req-f651c77d-80fc-4110-bff0-d571c487ff47 req-762f418a-061a-4432-bad4-965e5576655e service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Received event network-changed-880907a0-da53-40af-a1ad-126b284f384c {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1153.662483] env[63538]: DEBUG nova.compute.manager [req-f651c77d-80fc-4110-bff0-d571c487ff47 req-762f418a-061a-4432-bad4-965e5576655e service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Refreshing instance network info cache due to event network-changed-880907a0-da53-40af-a1ad-126b284f384c. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1153.662722] env[63538]: DEBUG oslo_concurrency.lockutils [req-f651c77d-80fc-4110-bff0-d571c487ff47 req-762f418a-061a-4432-bad4-965e5576655e service nova] Acquiring lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.662872] env[63538]: DEBUG oslo_concurrency.lockutils [req-f651c77d-80fc-4110-bff0-d571c487ff47 req-762f418a-061a-4432-bad4-965e5576655e service nova] Acquired lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.663051] env[63538]: DEBUG nova.network.neutron [req-f651c77d-80fc-4110-bff0-d571c487ff47 req-762f418a-061a-4432-bad4-965e5576655e service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Refreshing network info cache for port 880907a0-da53-40af-a1ad-126b284f384c {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1153.671544] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5224fb1e-d024-01ba-f366-031c169e1074, 'name': SearchDatastore_Task, 'duration_secs': 0.011235} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.673007] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.673277] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1153.673516] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.673667] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.673854] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1153.674979] env[63538]: DEBUG oslo_vmware.api [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1153.674979] env[63538]: value = "task-5101785" [ 1153.674979] env[63538]: _type = "Task" [ 1153.674979] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.675239] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59e54682-fd1f-44fc-97af-8a6b36e012a9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.690701] env[63538]: DEBUG oslo_vmware.api [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101785, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.692107] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1153.692107] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1153.692289] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02ca0a58-a21e-41ae-a8a4-8aa0577a1d93 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.699320] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1153.699320] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ed5f69-1070-a2ad-0e79-124c5d7f32b6" [ 1153.699320] env[63538]: _type = "Task" [ 1153.699320] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.708632] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ed5f69-1070-a2ad-0e79-124c5d7f32b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.801625] env[63538]: DEBUG oslo_concurrency.lockutils [req-de0b064b-bbdf-4842-a6d5-254097393c55 req-fbb95b13-192c-4e40-8b24-1de95ea0705e service nova] Releasing lock "refresh_cache-e3feec17-ca1b-4873-bb0a-370c3868aabf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.944594] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101784, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.047310] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101783, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524364} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.047310] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 048573b4-26db-4a62-81e0-1bc1c3999d02/048573b4-26db-4a62-81e0-1bc1c3999d02.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1154.047521] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1154.047809] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e5da3909-0650-4464-a083-9583d6e1fbaf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.055837] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1154.055837] env[63538]: value = "task-5101786" [ 1154.055837] env[63538]: _type = "Task" [ 1154.055837] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.068636] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101786, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.180895] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487de17a-c09a-420a-87e8-1b65f617f3f4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.192321] env[63538]: DEBUG oslo_vmware.api [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.195359] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f3451e-d3aa-48c1-bb2e-73c13cdd9170 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.230342] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ce59a4-5c8f-4ad7-9b32-47ac63a23d0f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.238587] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ed5f69-1070-a2ad-0e79-124c5d7f32b6, 'name': SearchDatastore_Task, 'duration_secs': 0.010468} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.239755] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba70511f-4559-46b7-b29f-f0b354884e53 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.246449] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9adc1070-74cc-448f-b6b0-e3ae3d0b88a1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.251779] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1154.251779] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e6d079-b6d0-fab0-c55b-9735dfdf6399" [ 1154.251779] env[63538]: _type = "Task" [ 1154.251779] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.263441] env[63538]: DEBUG nova.compute.provider_tree [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.270586] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e6d079-b6d0-fab0-c55b-9735dfdf6399, 'name': SearchDatastore_Task, 'duration_secs': 0.010802} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.270970] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1154.271172] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] e3feec17-ca1b-4873-bb0a-370c3868aabf/e3feec17-ca1b-4873-bb0a-370c3868aabf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1154.272076] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb4e5146-3a78-44b1-b1d1-66a8bdfe0719 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.280708] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1154.280708] env[63538]: value = "task-5101787" [ 1154.280708] env[63538]: _type = "Task" [ 1154.280708] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.289513] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101787, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.418821] env[63538]: DEBUG nova.network.neutron [req-f651c77d-80fc-4110-bff0-d571c487ff47 req-762f418a-061a-4432-bad4-965e5576655e service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updated VIF entry in instance network info cache for port 880907a0-da53-40af-a1ad-126b284f384c. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1154.419366] env[63538]: DEBUG nova.network.neutron [req-f651c77d-80fc-4110-bff0-d571c487ff47 req-762f418a-061a-4432-bad4-965e5576655e service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updating instance_info_cache with network_info: [{"id": "f31eeedc-851d-457f-8464-c8562fdeaf87", "address": "fa:16:3e:8d:d6:64", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf31eeedc-85", "ovs_interfaceid": "f31eeedc-851d-457f-8464-c8562fdeaf87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "880907a0-da53-40af-a1ad-126b284f384c", "address": "fa:16:3e:f4:b1:8b", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap880907a0-da", "ovs_interfaceid": "880907a0-da53-40af-a1ad-126b284f384c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.442087] env[63538]: DEBUG oslo_vmware.api [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101784, 'name': PowerOnVM_Task, 'duration_secs': 0.525662} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.442481] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1154.442660] env[63538]: INFO nova.compute.manager [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Took 8.97 seconds to spawn the instance on the hypervisor. [ 1154.442784] env[63538]: DEBUG nova.compute.manager [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1154.443637] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df214b87-2c35-4df4-bbe8-2cfdd10edf88 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.568451] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101786, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069595} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.568761] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1154.569704] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ebf807-c1f5-4486-9379-d822d0a190e4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.596422] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 048573b4-26db-4a62-81e0-1bc1c3999d02/048573b4-26db-4a62-81e0-1bc1c3999d02.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1154.596841] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27c0fee9-1b7f-4b12-9499-167e54bc07f3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.621022] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1154.621022] env[63538]: value = "task-5101788" [ 1154.621022] 
env[63538]: _type = "Task" [ 1154.621022] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.633209] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101788, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.691072] env[63538]: DEBUG oslo_vmware.api [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101785, 'name': ReconfigVM_Task, 'duration_secs': 1.010763} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.691750] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1154.692049] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Reconfigured VM to attach interface {{(pid=63538) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 1154.768267] env[63538]: DEBUG nova.scheduler.client.report [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1154.793180] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101787, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.924301] env[63538]: DEBUG oslo_concurrency.lockutils [req-f651c77d-80fc-4110-bff0-d571c487ff47 req-762f418a-061a-4432-bad4-965e5576655e service nova] Releasing lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1154.964413] env[63538]: INFO nova.compute.manager [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Took 16.46 seconds to build instance. 
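[annotation] The repeated "Waiting for the task" / "Task: {...} progress is N%" / "completed successfully" entries above come from the driver's poll-until-done loop around vCenter task objects (ReconfigVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task, ...). A minimal, self-contained sketch of that polling pattern is below; it is illustrative only, and get_task_info is a hypothetical stand-in for the vSphere property read that the real oslo.vmware client performs, not its actual API.

```python
import time
import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger("task-poll-sketch")

# Hypothetical stand-in for reading TaskInfo from vCenter; the real client
# fetches this over the vSphere SOAP API. Here it just replays canned states.
_FAKE_STATES = iter([("running", 10), ("running", 89), ("success", 100)])


def get_task_info(task_ref):
    state, progress = next(_FAKE_STATES)
    return {"state": state, "progress": progress}


def wait_for_task(task_ref, poll_interval=0.5):
    """Poll a task reference until it succeeds or errors, logging progress
    in the same style as the log lines above ("progress is N%", duration)."""
    start = time.time()
    while True:
        info = get_task_info(task_ref)
        if info["state"] == "success":
            LOG.debug("Task %s completed successfully in %.6fs",
                      task_ref, time.time() - start)
            return info
        if info["state"] == "error":
            raise RuntimeError("Task %s failed" % task_ref)
        LOG.debug("Task %s progress is %d%%", task_ref, info["progress"])
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Example: poll a (fake) reconfigure task the way the log polls task-5101785.
    wait_for_task("task-5101785")
```
[/annotation]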
[ 1155.131995] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101788, 'name': ReconfigVM_Task, 'duration_secs': 0.387385} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.132318] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 048573b4-26db-4a62-81e0-1bc1c3999d02/048573b4-26db-4a62-81e0-1bc1c3999d02.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1155.133413] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_options': None, 'encryption_format': None, 'device_name': '/dev/sda', 'size': 0, 'encryption_secret_uuid': None, 'encrypted': False, 'guest_format': None, 'device_type': 'disk', 'boot_index': 0, 'disk_bus': None, 'image_id': 'faabbca4-e27b-433a-b93d-f059fd73bc92'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992526', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'name': 'volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '048573b4-26db-4a62-81e0-1bc1c3999d02', 'attached_at': '', 'detached_at': '', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'serial': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1'}, 'delete_on_termination': False, 'attachment_id': '6177d447-53f0-449b-af81-f575f19a0494', 'guest_format': None, 'mount_device': '/dev/sdb', 'device_type': None, 'boot_index': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=63538) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1155.133625] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Volume attach. 
Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1155.133821] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992526', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'name': 'volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '048573b4-26db-4a62-81e0-1bc1c3999d02', 'attached_at': '', 'detached_at': '', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'serial': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1155.134629] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9811ccd-d82c-4206-a336-e46d990a2bbf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.152294] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb696caf-fe79-42aa-bd79-53d5d10da27e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.180466] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1/volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1155.180887] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23716f8c-b938-467a-8cc5-1ba3764955e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.201640] env[63538]: DEBUG oslo_concurrency.lockutils [None req-9f3cdfb7-478f-4801-bcbd-8b82f75c8f62 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-4387a3ec-0f0b-4917-97f3-08c737bee4e7-880907a0-da53-40af-a1ad-126b284f384c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.263s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.203096] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1155.203096] env[63538]: value = "task-5101789" [ 1155.203096] env[63538]: _type = "Task" [ 1155.203096] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.212650] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101789, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.273360] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.253s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.274018] env[63538]: DEBUG nova.compute.manager [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1155.292978] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101787, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527022} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.293304] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] e3feec17-ca1b-4873-bb0a-370c3868aabf/e3feec17-ca1b-4873-bb0a-370c3868aabf.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1155.293541] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1155.293812] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-615b093c-20e8-4750-bba0-01d9de2e010b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.301621] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1155.301621] env[63538]: value = "task-5101790" [ 1155.301621] env[63538]: _type = "Task" [ 1155.301621] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.312372] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101790, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.465535] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2d4de05-3f2e-4f0d-843a-0f766eb2d4c5 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Lock "0599fa68-1109-4edf-b42e-f81e7f09d641" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.980s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.718915] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101789, 'name': ReconfigVM_Task, 'duration_secs': 0.345614} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.719170] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Reconfigured VM instance instance-00000067 to attach disk [datastore1] volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1/volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1155.724251] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c157e917-1604-4159-8784-f25a934beac4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.744883] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Acquiring lock "0599fa68-1109-4edf-b42e-f81e7f09d641" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.745201] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Lock "0599fa68-1109-4edf-b42e-f81e7f09d641" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.745452] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Acquiring lock "0599fa68-1109-4edf-b42e-f81e7f09d641-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.746046] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Lock "0599fa68-1109-4edf-b42e-f81e7f09d641-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.746046] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Lock "0599fa68-1109-4edf-b42e-f81e7f09d641-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.747768] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1155.747768] env[63538]: value = "task-5101791" [ 1155.747768] env[63538]: _type = "Task" [ 1155.747768] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.748376] env[63538]: INFO nova.compute.manager [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Terminating instance [ 1155.753715] env[63538]: DEBUG nova.compute.manager [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1155.753890] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1155.754711] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9b1992-c846-419e-8d21-bd5f078bd976 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.763866] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101791, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.766133] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1155.766423] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c13d1dd-d38f-4caa-ba3a-4b91b2e56442 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.774187] env[63538]: DEBUG oslo_vmware.api [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Waiting for the task: (returnval){ [ 1155.774187] env[63538]: value = "task-5101792" [ 1155.774187] env[63538]: _type = "Task" [ 1155.774187] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.779066] env[63538]: DEBUG nova.compute.utils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1155.780542] env[63538]: DEBUG nova.compute.manager [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1155.780722] env[63538]: DEBUG nova.network.neutron [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1155.788247] env[63538]: DEBUG oslo_vmware.api [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101792, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.814854] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101790, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070788} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.815265] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1155.816370] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a1b148-30e1-4b72-8b77-242097481015 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.844375] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] e3feec17-ca1b-4873-bb0a-370c3868aabf/e3feec17-ca1b-4873-bb0a-370c3868aabf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1155.846202] env[63538]: DEBUG nova.policy [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ad1bddeca5346dea39d23339e09db3d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a06b7cc1ab24ba584bbe970e4fc5e81', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1155.848024] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4a6ec35-0345-4609-98d1-0d5bb5542f92 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.872587] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1155.872587] env[63538]: value = "task-5101793" [ 1155.872587] env[63538]: _type = "Task" [ 1155.872587] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.885412] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101793, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.267031] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101791, 'name': ReconfigVM_Task, 'duration_secs': 0.176827} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.267388] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992526', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'name': 'volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '048573b4-26db-4a62-81e0-1bc1c3999d02', 'attached_at': '', 'detached_at': '', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'serial': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1156.269452] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33703bda-a6d7-4aac-bab4-66698e45255a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.279879] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1156.279879] env[63538]: value = "task-5101794" [ 1156.279879] env[63538]: _type = "Task" [ 1156.279879] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.289753] env[63538]: DEBUG nova.compute.manager [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1156.294275] env[63538]: DEBUG oslo_vmware.api [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101792, 'name': PowerOffVM_Task, 'duration_secs': 0.309583} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.294339] env[63538]: DEBUG nova.network.neutron [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Successfully created port: 9f09c892-0333-4063-a5da-daa3e2bf19f5 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1156.297221] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1156.297393] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1156.298319] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5d41844-c2ab-475f-9e45-d9d5bb944791 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.305598] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101794, 'name': Rename_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.387260] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101793, 'name': ReconfigVM_Task, 'duration_secs': 0.282477} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.387601] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Reconfigured VM instance instance-00000070 to attach disk [datastore2] e3feec17-ca1b-4873-bb0a-370c3868aabf/e3feec17-ca1b-4873-bb0a-370c3868aabf.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1156.388864] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c410575b-c2b7-498a-95b6-bd17d3223612 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.393546] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1156.394236] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1156.394236] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Deleting the datastore file [datastore2] 0599fa68-1109-4edf-b42e-f81e7f09d641 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.394768] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e7e5711-a602-4849-910a-c152c2f4cc43 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.400066] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1156.400066] env[63538]: value = "task-5101796" [ 1156.400066] env[63538]: _type = "Task" [ 1156.400066] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.406797] env[63538]: DEBUG oslo_vmware.api [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Waiting for the task: (returnval){ [ 1156.406797] env[63538]: value = "task-5101797" [ 1156.406797] env[63538]: _type = "Task" [ 1156.406797] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.415389] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101796, 'name': Rename_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.422155] env[63538]: DEBUG oslo_vmware.api [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101797, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.790363] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101794, 'name': Rename_Task, 'duration_secs': 0.175746} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.790667] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1156.791039] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd963934-78e3-4baf-8c82-7a973934999a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.801882] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1156.801882] env[63538]: value = "task-5101798" [ 1156.801882] env[63538]: _type = "Task" [ 1156.801882] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.811418] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101798, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.857781] env[63538]: DEBUG oslo_concurrency.lockutils [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "interface-4387a3ec-0f0b-4917-97f3-08c737bee4e7-880907a0-da53-40af-a1ad-126b284f384c" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1156.859036] env[63538]: DEBUG oslo_concurrency.lockutils [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-4387a3ec-0f0b-4917-97f3-08c737bee4e7-880907a0-da53-40af-a1ad-126b284f384c" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1156.917943] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101796, 'name': Rename_Task, 'duration_secs': 0.204836} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.918830] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1156.919217] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ea4966f-5dd7-49dc-92a3-6f2580f6e66c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.923870] env[63538]: DEBUG oslo_vmware.api [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Task: {'id': task-5101797, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.223887} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.924530] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1156.924760] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1156.924993] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1156.925198] env[63538]: INFO nova.compute.manager [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1156.925454] env[63538]: DEBUG oslo.service.loopingcall [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1156.925659] env[63538]: DEBUG nova.compute.manager [-] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1156.925754] env[63538]: DEBUG nova.network.neutron [-] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1156.930089] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1156.930089] env[63538]: value = "task-5101799" [ 1156.930089] env[63538]: _type = "Task" [ 1156.930089] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.941120] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101799, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.304138] env[63538]: DEBUG nova.compute.manager [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1157.317097] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101798, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.319687] env[63538]: DEBUG nova.compute.manager [req-47cd1534-c7a9-4130-b645-1b25aa86d794 req-2887cf35-1623-4316-9918-a3e0babbfcbb service nova] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Received event network-vif-deleted-4befc99f-ca57-4a1f-9199-7001666fbd6b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1157.319895] env[63538]: INFO nova.compute.manager [req-47cd1534-c7a9-4130-b645-1b25aa86d794 req-2887cf35-1623-4316-9918-a3e0babbfcbb service nova] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Neutron deleted interface 4befc99f-ca57-4a1f-9199-7001666fbd6b; detaching it from the instance and deleting it from the info cache [ 1157.320087] env[63538]: DEBUG nova.network.neutron [req-47cd1534-c7a9-4130-b645-1b25aa86d794 req-2887cf35-1623-4316-9918-a3e0babbfcbb service nova] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.342283] env[63538]: DEBUG nova.virt.hardware [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1157.342750] env[63538]: DEBUG nova.virt.hardware [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1157.343040] env[63538]: DEBUG nova.virt.hardware [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1157.343426] env[63538]: DEBUG nova.virt.hardware [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1157.343726] env[63538]: DEBUG nova.virt.hardware [None 
req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1157.344023] env[63538]: DEBUG nova.virt.hardware [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1157.344381] env[63538]: DEBUG nova.virt.hardware [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1157.344715] env[63538]: DEBUG nova.virt.hardware [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1157.345059] env[63538]: DEBUG nova.virt.hardware [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1157.345364] env[63538]: DEBUG nova.virt.hardware [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1157.345683] env[63538]: DEBUG nova.virt.hardware [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1157.347568] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40175c1b-70e4-4d2f-8840-2d25a959b9f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.358457] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0810aaa-6597-433a-91b9-51d821bf2091 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.362854] env[63538]: DEBUG oslo_concurrency.lockutils [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1157.363059] env[63538]: DEBUG oslo_concurrency.lockutils [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock 
"4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.364137] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4627558d-088b-4d3d-8351-dc8115e3f78b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.391678] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e533f34-a115-4054-9dc7-ec0e61392b37 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.421219] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Reconfiguring VM to detach interface {{(pid=63538) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 1157.421588] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-975f76e0-561e-49e1-bb03-ed6dec219610 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.444772] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101799, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.446453] env[63538]: DEBUG oslo_vmware.api [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1157.446453] env[63538]: value = "task-5101800" [ 1157.446453] env[63538]: _type = "Task" [ 1157.446453] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.456204] env[63538]: DEBUG oslo_vmware.api [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101800, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.606852] env[63538]: DEBUG nova.compute.manager [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Stashing vm_state: active {{(pid=63538) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 1157.750499] env[63538]: DEBUG nova.network.neutron [-] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.816522] env[63538]: DEBUG oslo_vmware.api [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101798, 'name': PowerOnVM_Task, 'duration_secs': 0.748171} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.816857] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1157.817093] env[63538]: DEBUG nova.compute.manager [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1157.817936] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe52de3-12ee-468f-8f30-5990338b8cb1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.823068] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-adecfa19-bb59-433c-a1ea-f2e7063742b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.837977] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a7fbdb-7ce1-4f9d-a4db-f31462c39b0d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.873371] env[63538]: DEBUG nova.compute.manager [req-47cd1534-c7a9-4130-b645-1b25aa86d794 req-2887cf35-1623-4316-9918-a3e0babbfcbb service nova] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Detach interface failed, port_id=4befc99f-ca57-4a1f-9199-7001666fbd6b, reason: Instance 0599fa68-1109-4edf-b42e-f81e7f09d641 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1157.937801] env[63538]: DEBUG nova.compute.manager [req-e10574ec-8b1d-499f-97f7-92ca4290323d req-e8fb4c88-8803-40e5-9d5a-ef276bc74aed service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Received event network-vif-plugged-9f09c892-0333-4063-a5da-daa3e2bf19f5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1157.938055] env[63538]: DEBUG oslo_concurrency.lockutils [req-e10574ec-8b1d-499f-97f7-92ca4290323d req-e8fb4c88-8803-40e5-9d5a-ef276bc74aed service nova] Acquiring lock "f0183c1f-4557-45fd-ba65-4821ef661173-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1157.938278] env[63538]: DEBUG oslo_concurrency.lockutils [req-e10574ec-8b1d-499f-97f7-92ca4290323d req-e8fb4c88-8803-40e5-9d5a-ef276bc74aed service nova] Lock "f0183c1f-4557-45fd-ba65-4821ef661173-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1157.938452] env[63538]: DEBUG oslo_concurrency.lockutils [req-e10574ec-8b1d-499f-97f7-92ca4290323d req-e8fb4c88-8803-40e5-9d5a-ef276bc74aed service nova] Lock "f0183c1f-4557-45fd-ba65-4821ef661173-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.938625] env[63538]: DEBUG nova.compute.manager [req-e10574ec-8b1d-499f-97f7-92ca4290323d req-e8fb4c88-8803-40e5-9d5a-ef276bc74aed service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] No waiting events found dispatching network-vif-plugged-9f09c892-0333-4063-a5da-daa3e2bf19f5 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1157.938794] env[63538]: WARNING nova.compute.manager [req-e10574ec-8b1d-499f-97f7-92ca4290323d req-e8fb4c88-8803-40e5-9d5a-ef276bc74aed service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Received unexpected event network-vif-plugged-9f09c892-0333-4063-a5da-daa3e2bf19f5 for instance with vm_state building and task_state spawning. [ 1157.953859] env[63538]: DEBUG oslo_vmware.api [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101799, 'name': PowerOnVM_Task, 'duration_secs': 0.648471} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.954691] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1157.954913] env[63538]: INFO nova.compute.manager [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Took 7.97 seconds to spawn the instance on the hypervisor. 
[ 1157.955117] env[63538]: DEBUG nova.compute.manager [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1157.956272] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1e25f8-ec04-4de6-b7f3-0b0edb1302b1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.962387] env[63538]: DEBUG oslo_vmware.api [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101800, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.031420] env[63538]: DEBUG nova.network.neutron [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Successfully updated port: 9f09c892-0333-4063-a5da-daa3e2bf19f5 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1158.130678] env[63538]: DEBUG oslo_concurrency.lockutils [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.130972] env[63538]: DEBUG oslo_concurrency.lockutils [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.252948] env[63538]: INFO nova.compute.manager [-] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Took 1.33 seconds to deallocate network for instance. [ 1158.338845] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.457477] env[63538]: DEBUG oslo_vmware.api [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101800, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.478029] env[63538]: INFO nova.compute.manager [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Took 13.72 seconds to build instance. 
[ 1158.536449] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1158.536608] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.537023] env[63538]: DEBUG nova.network.neutron [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1158.636648] env[63538]: INFO nova.compute.claims [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1158.760444] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.961479] env[63538]: DEBUG oslo_vmware.api [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101800, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.980888] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e65538ae-2bc2-43ce-a6d9-2724de8dda95 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.227s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.082054] env[63538]: DEBUG nova.network.neutron [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1159.143231] env[63538]: INFO nova.compute.resource_tracker [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating resource usage from migration 29520b30-b1da-45d3-b4d7-f7d1bd5d790a [ 1159.257667] env[63538]: DEBUG nova.network.neutron [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance_info_cache with network_info: [{"id": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "address": "fa:16:3e:68:a4:9f", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f09c892-03", "ovs_interfaceid": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.304283] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94cf3192-4f28-439d-814f-505719bab294 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.312731] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5ff1b6-7686-4dff-be7c-b18ae0e94c85 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.346635] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef7c58f-12d5-4aa5-bae1-a97f00666943 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.354898] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe4fd2e-a7e2-4ad7-a1f7-81f6f87fa5fa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.368979] env[63538]: DEBUG nova.compute.provider_tree [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.461172] env[63538]: DEBUG oslo_vmware.api [None 
req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101800, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.504955] env[63538]: DEBUG oslo_concurrency.lockutils [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.505248] env[63538]: DEBUG oslo_concurrency.lockutils [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.505467] env[63538]: INFO nova.compute.manager [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Shelving [ 1159.521075] env[63538]: DEBUG nova.compute.manager [req-07fb21a1-4eb3-4b8e-894a-8c77895ec375 req-919006e4-2d6d-4616-be74-8566094cc17e service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Received event network-changed-c42aed5e-d684-4b97-aade-4acca4902f3d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1159.521602] env[63538]: DEBUG nova.compute.manager [req-07fb21a1-4eb3-4b8e-894a-8c77895ec375 req-919006e4-2d6d-4616-be74-8566094cc17e service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Refreshing instance network info cache due to event network-changed-c42aed5e-d684-4b97-aade-4acca4902f3d. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1159.521602] env[63538]: DEBUG oslo_concurrency.lockutils [req-07fb21a1-4eb3-4b8e-894a-8c77895ec375 req-919006e4-2d6d-4616-be74-8566094cc17e service nova] Acquiring lock "refresh_cache-e3feec17-ca1b-4873-bb0a-370c3868aabf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.521772] env[63538]: DEBUG oslo_concurrency.lockutils [req-07fb21a1-4eb3-4b8e-894a-8c77895ec375 req-919006e4-2d6d-4616-be74-8566094cc17e service nova] Acquired lock "refresh_cache-e3feec17-ca1b-4873-bb0a-370c3868aabf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.521835] env[63538]: DEBUG nova.network.neutron [req-07fb21a1-4eb3-4b8e-894a-8c77895ec375 req-919006e4-2d6d-4616-be74-8566094cc17e service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Refreshing network info cache for port c42aed5e-d684-4b97-aade-4acca4902f3d {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1159.760272] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1159.760638] env[63538]: DEBUG nova.compute.manager [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Instance network_info: |[{"id": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "address": "fa:16:3e:68:a4:9f", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f09c892-03", "ovs_interfaceid": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1159.761128] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:a4:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31264e2-3e0a-4dfb-ba1f-6389d7d47548', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'9f09c892-0333-4063-a5da-daa3e2bf19f5', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1159.769016] env[63538]: DEBUG oslo.service.loopingcall [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1159.769511] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1159.769765] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b19a300c-eb7c-49a6-9921-e0ddbacf9549 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.799359] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1159.799359] env[63538]: value = "task-5101801" [ 1159.799359] env[63538]: _type = "Task" [ 1159.799359] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.808586] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101801, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.871875] env[63538]: DEBUG nova.scheduler.client.report [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1159.968018] env[63538]: DEBUG oslo_vmware.api [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101800, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.970682] env[63538]: DEBUG nova.compute.manager [req-18cdcee8-28cb-4abe-b451-3bc1d62ce5bc req-7689a833-ff8e-4844-9d72-e204f7d11994 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Received event network-changed-9f09c892-0333-4063-a5da-daa3e2bf19f5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1159.970932] env[63538]: DEBUG nova.compute.manager [req-18cdcee8-28cb-4abe-b451-3bc1d62ce5bc req-7689a833-ff8e-4844-9d72-e204f7d11994 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Refreshing instance network info cache due to event network-changed-9f09c892-0333-4063-a5da-daa3e2bf19f5. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1159.971229] env[63538]: DEBUG oslo_concurrency.lockutils [req-18cdcee8-28cb-4abe-b451-3bc1d62ce5bc req-7689a833-ff8e-4844-9d72-e204f7d11994 service nova] Acquiring lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.971393] env[63538]: DEBUG oslo_concurrency.lockutils [req-18cdcee8-28cb-4abe-b451-3bc1d62ce5bc req-7689a833-ff8e-4844-9d72-e204f7d11994 service nova] Acquired lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.971562] env[63538]: DEBUG nova.network.neutron [req-18cdcee8-28cb-4abe-b451-3bc1d62ce5bc req-7689a833-ff8e-4844-9d72-e204f7d11994 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Refreshing network info cache for port 9f09c892-0333-4063-a5da-daa3e2bf19f5 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1160.014516] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1160.015224] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47fa4e86-87bb-45f4-8b55-bece9db578b4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.024086] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1160.024086] env[63538]: value = "task-5101802" [ 1160.024086] env[63538]: _type = "Task" [ 1160.024086] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.038827] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101802, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.311600] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101801, 'name': CreateVM_Task, 'duration_secs': 0.331636} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.312555] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1160.313415] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1160.313629] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.314048] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1160.314375] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3bba8c5-d620-4837-aef6-fdf619fa575e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.321676] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1160.321676] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526a2baa-1ea2-f3d4-100b-d1d8db2abea0" [ 1160.321676] env[63538]: _type = "Task" [ 1160.321676] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.332155] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526a2baa-1ea2-f3d4-100b-d1d8db2abea0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.378406] env[63538]: DEBUG oslo_concurrency.lockutils [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.247s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.378658] env[63538]: INFO nova.compute.manager [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Migrating [ 1160.385786] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.047s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.386054] env[63538]: DEBUG nova.objects.instance [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63538) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1160.463281] env[63538]: DEBUG oslo_vmware.api [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101800, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.506361] env[63538]: DEBUG nova.network.neutron [req-07fb21a1-4eb3-4b8e-894a-8c77895ec375 req-919006e4-2d6d-4616-be74-8566094cc17e service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Updated VIF entry in instance network info cache for port c42aed5e-d684-4b97-aade-4acca4902f3d. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1160.506774] env[63538]: DEBUG nova.network.neutron [req-07fb21a1-4eb3-4b8e-894a-8c77895ec375 req-919006e4-2d6d-4616-be74-8566094cc17e service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Updating instance_info_cache with network_info: [{"id": "c42aed5e-d684-4b97-aade-4acca4902f3d", "address": "fa:16:3e:ea:d4:32", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc42aed5e-d6", "ovs_interfaceid": "c42aed5e-d684-4b97-aade-4acca4902f3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.534755] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101802, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.687174] env[63538]: DEBUG nova.network.neutron [req-18cdcee8-28cb-4abe-b451-3bc1d62ce5bc req-7689a833-ff8e-4844-9d72-e204f7d11994 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updated VIF entry in instance network info cache for port 9f09c892-0333-4063-a5da-daa3e2bf19f5. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1160.687541] env[63538]: DEBUG nova.network.neutron [req-18cdcee8-28cb-4abe-b451-3bc1d62ce5bc req-7689a833-ff8e-4844-9d72-e204f7d11994 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance_info_cache with network_info: [{"id": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "address": "fa:16:3e:68:a4:9f", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f09c892-03", "ovs_interfaceid": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.833628] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526a2baa-1ea2-f3d4-100b-d1d8db2abea0, 'name': SearchDatastore_Task, 'duration_secs': 0.012803} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.833955] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1160.834238] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1160.834492] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1160.834647] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.834833] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1160.835132] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9bef967f-a1ef-4fc4-8f53-d7772af757d8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.844292] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1160.844473] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1160.845288] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5563097f-1b1e-4d4a-a712-b9c495eb95d0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.851074] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1160.851074] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520877ef-3bb8-e93e-33d0-ac016d0ffced" [ 1160.851074] env[63538]: _type = "Task" [ 1160.851074] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.858866] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520877ef-3bb8-e93e-33d0-ac016d0ffced, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.900084] env[63538]: DEBUG oslo_concurrency.lockutils [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1160.900258] env[63538]: DEBUG oslo_concurrency.lockutils [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.900344] env[63538]: DEBUG nova.network.neutron [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1160.962012] env[63538]: DEBUG oslo_vmware.api [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101800, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.009436] env[63538]: DEBUG oslo_concurrency.lockutils [req-07fb21a1-4eb3-4b8e-894a-8c77895ec375 req-919006e4-2d6d-4616-be74-8566094cc17e service nova] Releasing lock "refresh_cache-e3feec17-ca1b-4873-bb0a-370c3868aabf" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1161.035478] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101802, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.190830] env[63538]: DEBUG oslo_concurrency.lockutils [req-18cdcee8-28cb-4abe-b451-3bc1d62ce5bc req-7689a833-ff8e-4844-9d72-e204f7d11994 service nova] Releasing lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1161.362175] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520877ef-3bb8-e93e-33d0-ac016d0ffced, 'name': SearchDatastore_Task, 'duration_secs': 0.010105} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.363072] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28cb8921-3381-48ee-96e7-46a86cbb3a30 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.369313] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1161.369313] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521ac898-9a9e-d888-a3d8-0fbb6666a8d6" [ 1161.369313] env[63538]: _type = "Task" [ 1161.369313] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.378386] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521ac898-9a9e-d888-a3d8-0fbb6666a8d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.403075] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c0dd2ba6-57fe-445a-ae28-632356cf7ed3 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.406908] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.646s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.406908] env[63538]: DEBUG nova.objects.instance [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Lazy-loading 'resources' on Instance uuid 0599fa68-1109-4edf-b42e-f81e7f09d641 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1161.463016] env[63538]: DEBUG oslo_vmware.api [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101800, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.535605] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101802, 'name': PowerOffVM_Task, 'duration_secs': 1.218654} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.535907] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1161.536794] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba326f6-3348-4703-95e7-6b5c7c82a009 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.559332] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba318eb4-2639-4d4e-a780-63d3d8469b4a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.667981] env[63538]: DEBUG nova.network.neutron [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance_info_cache with network_info: [{"id": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "address": "fa:16:3e:e0:99:90", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b86754-97", "ovs_interfaceid": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.880346] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521ac898-9a9e-d888-a3d8-0fbb6666a8d6, 'name': SearchDatastore_Task, 'duration_secs': 0.011317} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.880652] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1161.880891] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] f0183c1f-4557-45fd-ba65-4821ef661173/f0183c1f-4557-45fd-ba65-4821ef661173.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1161.881220] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19f29de2-b553-48d6-840b-2a422d26c40d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.889424] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1161.889424] env[63538]: value = "task-5101803" [ 1161.889424] env[63538]: _type = "Task" [ 1161.889424] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.897749] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101803, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.966225] env[63538]: DEBUG oslo_vmware.api [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101800, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.068901] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357e1bb8-46ae-44ad-9cd2-15590141bb8a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.072773] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1162.073144] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7df093a5-103c-478a-9c7d-d6864c606fa6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.080714] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f5814b-a938-47c4-a0b4-49c3ab9fc4de {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.086034] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1162.086034] env[63538]: value = "task-5101804" [ 1162.086034] env[63538]: _type = "Task" [ 1162.086034] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.120554] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebc3b0f-bef5-489d-bcaa-d40f0b26529c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.126301] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101804, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.133501] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38570c7f-3556-4220-a904-2ba5f959f0ac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.150152] env[63538]: DEBUG nova.compute.provider_tree [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1162.171355] env[63538]: DEBUG oslo_concurrency.lockutils [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1162.401466] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101803, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.463136] env[63538]: DEBUG oslo_vmware.api [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101800, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.597858] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101804, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.654228] env[63538]: DEBUG nova.scheduler.client.report [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1162.905313] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101803, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559369} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.905587] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] f0183c1f-4557-45fd-ba65-4821ef661173/f0183c1f-4557-45fd-ba65-4821ef661173.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1162.905813] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1162.906088] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d56de13-d691-4911-aab5-136f9e9ec3dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.914189] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1162.914189] env[63538]: value = "task-5101805" [ 1162.914189] env[63538]: _type = "Task" [ 1162.914189] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.923007] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101805, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.964587] env[63538]: DEBUG oslo_vmware.api [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101800, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.098465] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101804, 'name': CreateSnapshot_Task, 'duration_secs': 0.549962} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.098865] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1163.099446] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89d75ed-a154-46b3-8415-363e89737200 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.161237] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.753s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.186986] env[63538]: INFO nova.scheduler.client.report [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Deleted allocations for instance 0599fa68-1109-4edf-b42e-f81e7f09d641 [ 1163.424522] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101805, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073518} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.424834] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1163.425704] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c727076-b3af-433e-96bc-8094d942b64e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.448647] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] f0183c1f-4557-45fd-ba65-4821ef661173/f0183c1f-4557-45fd-ba65-4821ef661173.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1163.450215] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96b6a201-902c-4adb-a66d-9934b95d467b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.477497] env[63538]: DEBUG oslo_vmware.api [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101800, 'name': ReconfigVM_Task, 'duration_secs': 5.783538} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.479379] env[63538]: DEBUG oslo_concurrency.lockutils [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1163.479606] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Reconfigured VM to detach interface {{(pid=63538) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 1163.482190] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1163.482190] env[63538]: value = "task-5101806" [ 1163.482190] env[63538]: _type = "Task" [ 1163.482190] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.492479] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101806, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.618488] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1163.619229] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6118b380-5d1a-4fd7-9467-18b5a8e6db09 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.631390] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1163.631390] env[63538]: value = "task-5101807" [ 1163.631390] env[63538]: _type = "Task" [ 1163.631390] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.640515] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101807, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.692995] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f10b4a-17c8-4ef5-bdda-0ddf70359414 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.698905] env[63538]: DEBUG oslo_concurrency.lockutils [None req-af8ed565-e97a-49ca-bb0e-e282ee62ecd2 tempest-InstanceActionsV221TestJSON-123511877 tempest-InstanceActionsV221TestJSON-123511877-project-member] Lock "0599fa68-1109-4edf-b42e-f81e7f09d641" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.954s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.716878] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance 'e0d5a3b2-21e1-4de0-ac10-1a5687a60c10' progress to 0 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1163.969481] env[63538]: INFO nova.compute.manager [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Rebuilding instance [ 1163.994219] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101806, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.022105] env[63538]: DEBUG nova.compute.manager [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1164.023056] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c693cd-fafc-47c9-a2a6-b8e5459a9613 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.142416] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101807, 'name': CloneVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.226245] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1164.226506] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3a6999d-4d88-4f60-bb31-9137646d6c3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.235090] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1164.235090] env[63538]: value = "task-5101808" [ 1164.235090] env[63538]: _type = "Task" [ 1164.235090] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.244819] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101808, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.494289] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101806, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.536293] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1164.536618] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d513665-3df4-4655-b9b6-2ade686a26ac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.545484] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Waiting for the task: (returnval){ [ 1164.545484] env[63538]: value = "task-5101809" [ 1164.545484] env[63538]: _type = "Task" [ 1164.545484] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.555881] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101809, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.642902] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101807, 'name': CloneVM_Task} progress is 95%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.746993] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101808, 'name': PowerOffVM_Task, 'duration_secs': 0.386452} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.746993] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1164.747313] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance 'e0d5a3b2-21e1-4de0-ac10-1a5687a60c10' progress to 17 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1164.974211] env[63538]: DEBUG oslo_concurrency.lockutils [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1164.974416] env[63538]: DEBUG oslo_concurrency.lockutils [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquired lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.974604] env[63538]: DEBUG nova.network.neutron [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1164.994831] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101806, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.055868] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101809, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.144702] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101807, 'name': CloneVM_Task, 'duration_secs': 1.415259} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.145160] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Created linked-clone VM from snapshot [ 1165.145805] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8a0f07-18fa-4907-9389-a69386c43a02 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.154925] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Uploading image fc39aa85-5f54-44b2-83e7-fcf99170aec7 {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1165.185398] env[63538]: DEBUG oslo_vmware.rw_handles [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1165.185398] env[63538]: value = "vm-992536" [ 1165.185398] env[63538]: _type = "VirtualMachine" [ 1165.185398] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1165.185740] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-97896c2c-9957-4d96-b8f1-c639a8997af6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.193770] env[63538]: DEBUG oslo_vmware.rw_handles [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lease: (returnval){ [ 1165.193770] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f7e65c-a9dd-a433-b513-98df5fdb93bb" [ 1165.193770] env[63538]: _type = "HttpNfcLease" [ 1165.193770] env[63538]: } obtained for exporting VM: (result){ [ 1165.193770] env[63538]: value = "vm-992536" [ 1165.193770] env[63538]: _type = "VirtualMachine" [ 1165.193770] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1165.194137] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the lease: (returnval){ [ 1165.194137] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f7e65c-a9dd-a433-b513-98df5fdb93bb" [ 1165.194137] env[63538]: _type = "HttpNfcLease" [ 1165.194137] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1165.202042] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1165.202042] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f7e65c-a9dd-a433-b513-98df5fdb93bb" [ 1165.202042] env[63538]: _type = "HttpNfcLease" [ 1165.202042] env[63538]: } is initializing. 
{{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1165.255405] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1165.255772] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1165.255889] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1165.256180] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1165.256513] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1165.256825] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1165.257208] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1165.257502] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1165.257840] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Got 1 possible 
topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1165.258142] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1165.258461] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1165.264462] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8674d258-ef63-4ccd-9830-ea8d27a823df {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.283622] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1165.283622] env[63538]: value = "task-5101811" [ 1165.283622] env[63538]: _type = "Task" [ 1165.283622] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.294532] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101811, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.321446] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.321643] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.322720] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.322720] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.322720] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.324556] env[63538]: INFO nova.compute.manager [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Terminating instance [ 1165.326574] env[63538]: DEBUG nova.compute.manager [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1165.326862] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1165.327893] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8fae64b-df15-41e1-865e-f736ff981322 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.338110] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1165.338110] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c03d74ab-1f2e-48c3-97e9-993155829dcb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.346689] env[63538]: DEBUG oslo_vmware.api [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1165.346689] env[63538]: value = "task-5101812" [ 1165.346689] env[63538]: _type = "Task" [ 1165.346689] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.361344] env[63538]: DEBUG oslo_vmware.api [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101812, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.496171] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101806, 'name': ReconfigVM_Task, 'duration_secs': 1.653198} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.496507] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfigured VM instance instance-00000071 to attach disk [datastore1] f0183c1f-4557-45fd-ba65-4821ef661173/f0183c1f-4557-45fd-ba65-4821ef661173.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1165.497376] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11e1be28-8b2b-41d1-add6-0902d898223a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.506364] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1165.506364] env[63538]: value = "task-5101813" [ 1165.506364] env[63538]: _type = "Task" [ 1165.506364] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.518623] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101813, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.557130] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101809, 'name': PowerOffVM_Task, 'duration_secs': 0.760141} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.557416] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1165.558216] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1165.558504] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31c691ed-7afc-4436-bfa0-75fe7efd9ff0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.567753] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Waiting for the task: (returnval){ [ 1165.567753] env[63538]: value = "task-5101814" [ 1165.567753] env[63538]: _type = "Task" [ 1165.567753] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.579080] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1165.579080] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Volume detach. Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1165.579287] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992508', 'volume_id': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'name': 'volume-f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'aaf52cad-86fd-42df-8ee3-13724e3f5e94', 'attached_at': '', 'detached_at': '', 'volume_id': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'serial': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1165.580083] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398e9159-0c86-4bc1-b183-6249c5cbd557 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.602578] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45fa4b70-e0d9-41bb-aaf0-2f8e56324f4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.616302] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae33335-ad90-4183-bd24-406be8967d0e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.636219] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fec34e-c9e5-4409-88e0-52ef573ebeab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.652910] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] The volume has not been displaced from its original location: [datastore1] volume-f66d7fd1-7450-49dd-aedf-14fdf30f2e90/volume-f66d7fd1-7450-49dd-aedf-14fdf30f2e90.vmdk. No consolidation needed. 
{{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1165.658741] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Reconfiguring VM instance instance-0000006c to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1165.661672] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a23c24f7-c358-41a4-8ac4-f4d0e64ef25d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.683020] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Waiting for the task: (returnval){ [ 1165.683020] env[63538]: value = "task-5101815" [ 1165.683020] env[63538]: _type = "Task" [ 1165.683020] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.692681] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101815, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.704430] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1165.704430] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f7e65c-a9dd-a433-b513-98df5fdb93bb" [ 1165.704430] env[63538]: _type = "HttpNfcLease" [ 1165.704430] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1165.704792] env[63538]: DEBUG oslo_vmware.rw_handles [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1165.704792] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f7e65c-a9dd-a433-b513-98df5fdb93bb" [ 1165.704792] env[63538]: _type = "HttpNfcLease" [ 1165.704792] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1165.705672] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ea236c-90b7-4b7a-8b31-628326884df0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.714481] env[63538]: DEBUG oslo_vmware.rw_handles [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bd865b-caa5-7fc4-55dc-7c1d9aafde5a/disk-0.vmdk from lease info. 
{{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1165.714689] env[63538]: DEBUG oslo_vmware.rw_handles [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bd865b-caa5-7fc4-55dc-7c1d9aafde5a/disk-0.vmdk for reading. {{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1165.794367] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101811, 'name': ReconfigVM_Task, 'duration_secs': 0.220787} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.794682] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance 'e0d5a3b2-21e1-4de0-ac10-1a5687a60c10' progress to 33 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1165.824442] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2f26100c-ad45-40fd-8824-3cbf8ddc556d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.861872] env[63538]: DEBUG oslo_vmware.api [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101812, 'name': PowerOffVM_Task, 'duration_secs': 0.221636} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.863523] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1165.863523] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1165.863523] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-087b6d1a-f00c-4c37-9b22-fba4ea26f377 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.872696] env[63538]: INFO nova.network.neutron [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Port 880907a0-da53-40af-a1ad-126b284f384c from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1165.873073] env[63538]: DEBUG nova.network.neutron [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updating instance_info_cache with network_info: [{"id": "f31eeedc-851d-457f-8464-c8562fdeaf87", "address": "fa:16:3e:8d:d6:64", "network": {"id": "44a04a43-a979-4648-89b2-63323df5b0f3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-826825777-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7063c42297c24f2baf7271fa25dec927", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf31eeedc-85", "ovs_interfaceid": "f31eeedc-851d-457f-8464-c8562fdeaf87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.943434] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1165.943673] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1165.946220] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Deleting the datastore file [datastore1] 4387a3ec-0f0b-4917-97f3-08c737bee4e7 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1165.946220] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e381606-082c-41ad-a117-8159dc4b61f1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.955283] env[63538]: DEBUG oslo_vmware.api [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1165.955283] env[63538]: value = "task-5101817" [ 1165.955283] env[63538]: _type = "Task" [ 1165.955283] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.966622] env[63538]: DEBUG oslo_vmware.api [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101817, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.017180] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101813, 'name': Rename_Task, 'duration_secs': 0.23438} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.017509] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1166.017800] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81585b05-ec50-48ce-b9e5-27988aabf580 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.025395] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1166.025395] env[63538]: value = "task-5101818" [ 1166.025395] env[63538]: _type = "Task" [ 1166.025395] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.037587] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101818, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.194886] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101815, 'name': ReconfigVM_Task, 'duration_secs': 0.183299} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.195512] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Reconfigured VM instance instance-0000006c to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1166.200729] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7d9993f-8393-4bf7-acf3-efe8f2d005fe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.218696] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Waiting for the task: (returnval){ [ 1166.218696] env[63538]: value = "task-5101819" [ 1166.218696] env[63538]: _type = "Task" [ 1166.218696] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.229527] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101819, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.302098] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1166.302230] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1166.302395] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1166.302685] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1166.302916] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 
tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1166.303121] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1166.303704] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1166.304107] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1166.304423] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1166.304617] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1166.304804] env[63538]: DEBUG nova.virt.hardware [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1166.311665] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1166.312159] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ba9c6a0-c057-43c3-b3cc-55ad3d194e98 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.335041] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1166.335041] env[63538]: value = "task-5101820" [ 1166.335041] env[63538]: _type = "Task" [ 1166.335041] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.348317] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101820, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.375510] env[63538]: DEBUG oslo_concurrency.lockutils [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Releasing lock "refresh_cache-4387a3ec-0f0b-4917-97f3-08c737bee4e7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1166.466656] env[63538]: DEBUG oslo_vmware.api [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101817, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201316} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.467100] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1166.467540] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1166.467677] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1166.467917] env[63538]: INFO nova.compute.manager [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1166.468248] env[63538]: DEBUG oslo.service.loopingcall [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1166.469061] env[63538]: DEBUG nova.compute.manager [-] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1166.469061] env[63538]: DEBUG nova.network.neutron [-] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1166.537711] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101818, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.729980] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101819, 'name': ReconfigVM_Task, 'duration_secs': 0.197542} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.730418] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992508', 'volume_id': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'name': 'volume-f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'aaf52cad-86fd-42df-8ee3-13724e3f5e94', 'attached_at': '', 'detached_at': '', 'volume_id': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90', 'serial': 'f66d7fd1-7450-49dd-aedf-14fdf30f2e90'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1166.730723] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1166.731642] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4279a677-6a97-4f00-abc8-5e70b2b25e8d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.740470] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1166.740874] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ef261d3-246c-42ac-af4c-4447df96a5cb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.819018] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ab706d-5b97-49b0-81f2-11b12f851034 
tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1166.819330] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1166.819512] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Deleting the datastore file [datastore1] aaf52cad-86fd-42df-8ee3-13724e3f5e94 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1166.819790] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-537294de-7077-48aa-8c6a-dbaa511d00d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.828777] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Waiting for the task: (returnval){ [ 1166.828777] env[63538]: value = "task-5101822" [ 1166.828777] env[63538]: _type = "Task" [ 1166.828777] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.846364] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101820, 'name': ReconfigVM_Task, 'duration_secs': 0.29161} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.852997] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1166.853719] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101822, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.855160] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244e86a9-9139-4a0e-bb9c-d056b77e7f76 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.883016] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] e0d5a3b2-21e1-4de0-ac10-1a5687a60c10/e0d5a3b2-21e1-4de0-ac10-1a5687a60c10.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1166.884937] env[63538]: DEBUG oslo_concurrency.lockutils [None req-78d32470-f8b5-4f6c-8f7d-5480a0b497bf tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "interface-4387a3ec-0f0b-4917-97f3-08c737bee4e7-880907a0-da53-40af-a1ad-126b284f384c" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.026s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.886292] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4caa1ee-55b0-4376-b106-750cc8bd391d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.908300] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1166.908300] env[63538]: value = "task-5101823" [ 1166.908300] env[63538]: _type = "Task" [ 1166.908300] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.919625] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101823, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.040437] env[63538]: DEBUG oslo_vmware.api [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101818, 'name': PowerOnVM_Task, 'duration_secs': 0.612758} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.040727] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1167.041068] env[63538]: INFO nova.compute.manager [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Took 9.74 seconds to spawn the instance on the hypervisor. [ 1167.041149] env[63538]: DEBUG nova.compute.manager [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1167.041999] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79cc9e4-5b92-4a39-841e-51b18e54bb82 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.343622] env[63538]: DEBUG oslo_vmware.api [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Task: {'id': task-5101822, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100105} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.343622] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1167.343622] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1167.343622] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1167.414084] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Volume detach. 
Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1167.414482] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f63bb856-277d-4f08-8975-0d3f530d4170 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.424320] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101823, 'name': ReconfigVM_Task, 'duration_secs': 0.465827} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.425857] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Reconfigured VM instance instance-0000006b to attach disk [datastore2] e0d5a3b2-21e1-4de0-ac10-1a5687a60c10/e0d5a3b2-21e1-4de0-ac10-1a5687a60c10.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1167.426245] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance 'e0d5a3b2-21e1-4de0-ac10-1a5687a60c10' progress to 50 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1167.433010] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcdf148-fa02-41a2-868e-0202a9ac50a0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.447555] env[63538]: DEBUG nova.compute.manager [req-f344e649-a45c-45e1-b62a-d0503b179e78 req-ee18a990-622f-42cb-9ff7-f77d9156f470 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Received event network-vif-deleted-f31eeedc-851d-457f-8464-c8562fdeaf87 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1167.447778] env[63538]: INFO nova.compute.manager [req-f344e649-a45c-45e1-b62a-d0503b179e78 req-ee18a990-622f-42cb-9ff7-f77d9156f470 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Neutron deleted interface f31eeedc-851d-457f-8464-c8562fdeaf87; detaching it from the instance and deleting it from the info cache [ 1167.448072] env[63538]: DEBUG nova.network.neutron [req-f344e649-a45c-45e1-b62a-d0503b179e78 req-ee18a990-622f-42cb-9ff7-f77d9156f470 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.478092] env[63538]: ERROR nova.compute.manager [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Failed to detach volume f66d7fd1-7450-49dd-aedf-14fdf30f2e90 from /dev/sda: nova.exception.InstanceNotFound: Instance aaf52cad-86fd-42df-8ee3-13724e3f5e94 could not be found. 
[ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Traceback (most recent call last): [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] self.driver.rebuild(**kwargs) [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] raise NotImplementedError() [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] NotImplementedError [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] During handling of the above exception, another exception occurred: [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Traceback (most recent call last): [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] self.driver.detach_volume(context, old_connection_info, [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 553, in detach_volume [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] return self._volumeops.detach_volume(connection_info, instance) [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] self._detach_volume_vmdk(connection_info, instance) [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] stable_ref.fetch_moref(session) [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] 
nova.exception.InstanceNotFound: Instance aaf52cad-86fd-42df-8ee3-13724e3f5e94 could not be found. [ 1167.478092] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] [ 1167.568845] env[63538]: INFO nova.compute.manager [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Took 14.57 seconds to build instance. [ 1167.662849] env[63538]: DEBUG nova.compute.utils [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Build of instance aaf52cad-86fd-42df-8ee3-13724e3f5e94 aborted: Failed to rebuild volume backed instance. {{(pid=63538) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1167.666544] env[63538]: ERROR nova.compute.manager [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance aaf52cad-86fd-42df-8ee3-13724e3f5e94 aborted: Failed to rebuild volume backed instance. [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Traceback (most recent call last): [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] self.driver.rebuild(**kwargs) [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] raise NotImplementedError() [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] NotImplementedError [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] During handling of the above exception, another exception occurred: [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Traceback (most recent call last): [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/compute/manager.py", line 3601, in _rebuild_volume_backed_instance [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] self._detach_root_volume(context, instance, root_bdm) [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/compute/manager.py", line 3580, in _detach_root_volume [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] with excutils.save_and_reraise_exception(): [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] self.force_reraise() [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] raise self.value [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] self.driver.detach_volume(context, old_connection_info, [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 553, in detach_volume [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] return self._volumeops.detach_volume(connection_info, instance) [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] self._detach_volume_vmdk(connection_info, instance) [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] stable_ref.fetch_moref(session) [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] nova.exception.InstanceNotFound: Instance aaf52cad-86fd-42df-8ee3-13724e3f5e94 could not be found. 
[ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] [ 1167.666544] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] During handling of the above exception, another exception occurred: [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Traceback (most recent call last): [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/compute/manager.py", line 10954, in _error_out_instance_on_exception [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] yield [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/compute/manager.py", line 3869, in rebuild_instance [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] self._do_rebuild_instance_with_claim( [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/compute/manager.py", line 3955, in _do_rebuild_instance_with_claim [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] self._do_rebuild_instance( [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/compute/manager.py", line 4147, in _do_rebuild_instance [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] self._rebuild_default_impl(**kwargs) [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/compute/manager.py", line 3724, in _rebuild_default_impl [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] self._rebuild_volume_backed_instance( [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] File "/opt/stack/nova/nova/compute/manager.py", line 3616, in _rebuild_volume_backed_instance [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] raise exception.BuildAbortException( [ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] nova.exception.BuildAbortException: Build of instance aaf52cad-86fd-42df-8ee3-13724e3f5e94 aborted: Failed to rebuild volume backed instance. 
[ 1167.667788] env[63538]: ERROR nova.compute.manager [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] [ 1167.901463] env[63538]: DEBUG nova.network.neutron [-] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.948422] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d5b9a2-a8e9-4bcc-be20-ee8d87246578 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.952118] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a3fa5eef-a21f-4b7d-a136-9ddc3804f616 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.981060] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee5aa5a-162b-43e9-bb1b-4c49e63d7d76 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.988505] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1598db7-2384-461d-977b-1385b6e81f57 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.019070] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance 'e0d5a3b2-21e1-4de0-ac10-1a5687a60c10' progress to 67 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1168.032837] env[63538]: DEBUG nova.compute.manager [req-f344e649-a45c-45e1-b62a-d0503b179e78 req-ee18a990-622f-42cb-9ff7-f77d9156f470 service nova] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Detach interface failed, port_id=f31eeedc-851d-457f-8464-c8562fdeaf87, reason: Instance 4387a3ec-0f0b-4917-97f3-08c737bee4e7 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1168.072301] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b47f8204-f8e3-4e01-b2c1-ae1dc1ce0fd6 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "f0183c1f-4557-45fd-ba65-4821ef661173" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.080s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.185099] env[63538]: DEBUG oslo_concurrency.lockutils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "6a8de8d2-aa15-4057-a936-57cad9c8b1d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.185868] env[63538]: DEBUG oslo_concurrency.lockutils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "6a8de8d2-aa15-4057-a936-57cad9c8b1d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.293552] env[63538]: DEBUG nova.compute.manager [req-5f20cbc5-06a6-44f4-8ea0-46fdfb308fbc req-776f5316-ae17-417a-9315-fb47680576f7 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Received event network-changed-9f09c892-0333-4063-a5da-daa3e2bf19f5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1168.293808] env[63538]: DEBUG nova.compute.manager [req-5f20cbc5-06a6-44f4-8ea0-46fdfb308fbc req-776f5316-ae17-417a-9315-fb47680576f7 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Refreshing instance network info cache due to event network-changed-9f09c892-0333-4063-a5da-daa3e2bf19f5. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1168.294101] env[63538]: DEBUG oslo_concurrency.lockutils [req-5f20cbc5-06a6-44f4-8ea0-46fdfb308fbc req-776f5316-ae17-417a-9315-fb47680576f7 service nova] Acquiring lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.294345] env[63538]: DEBUG oslo_concurrency.lockutils [req-5f20cbc5-06a6-44f4-8ea0-46fdfb308fbc req-776f5316-ae17-417a-9315-fb47680576f7 service nova] Acquired lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.294644] env[63538]: DEBUG nova.network.neutron [req-5f20cbc5-06a6-44f4-8ea0-46fdfb308fbc req-776f5316-ae17-417a-9315-fb47680576f7 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Refreshing network info cache for port 9f09c892-0333-4063-a5da-daa3e2bf19f5 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1168.404873] env[63538]: INFO nova.compute.manager [-] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Took 1.94 seconds to deallocate network for instance. 
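The long rebuild-failure traceback earlier in this section (the InstanceNotFound raised while detaching the root volume, later converted into a BuildAbortException) passes through oslo_utils.excutils.save_and_reraise_exception. The sketch below shows that re-raise pattern in isolation, with placeholder detach/cleanup helpers; it is not the Nova code path itself, only the library idiom it relies on.

# Generic sketch of the save_and_reraise_exception pattern visible in the
# traceback above: on failure, run some cleanup, then re-raise the original
# exception so the caller still observes it. The helpers are placeholders.
from oslo_utils import excutils


def failing_detach():
    raise RuntimeError('detach failed')   # stand-in for driver.detach_volume


def detach_with_cleanup():
    try:
        failing_detach()
    except Exception:
        # Saves the active exception, runs the block, and re-raises the saved
        # exception when the context manager exits.
        with excutils.save_and_reraise_exception():
            print('cleanup ran before re-raise')


try:
    detach_with_cleanup()
except RuntimeError as exc:
    print('caller still sees:', exc)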
[ 1168.451388] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "d91a140b-6ca9-4c0e-b433-795d2014975c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.451728] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "d91a140b-6ca9-4c0e-b433-795d2014975c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.570585] env[63538]: DEBUG nova.network.neutron [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Port c8b86754-970c-4f8a-b3fb-ec8fb42d3863 binding to destination host cpu-1 is already ACTIVE {{(pid=63538) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1168.688908] env[63538]: DEBUG nova.compute.manager [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1168.912960] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.913320] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.913601] env[63538]: DEBUG nova.objects.instance [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'resources' on Instance uuid 4387a3ec-0f0b-4917-97f3-08c737bee4e7 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1168.956983] env[63538]: DEBUG nova.compute.manager [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1169.071389] env[63538]: DEBUG nova.network.neutron [req-5f20cbc5-06a6-44f4-8ea0-46fdfb308fbc req-776f5316-ae17-417a-9315-fb47680576f7 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updated VIF entry in instance network info cache for port 9f09c892-0333-4063-a5da-daa3e2bf19f5. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1169.071840] env[63538]: DEBUG nova.network.neutron [req-5f20cbc5-06a6-44f4-8ea0-46fdfb308fbc req-776f5316-ae17-417a-9315-fb47680576f7 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance_info_cache with network_info: [{"id": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "address": "fa:16:3e:68:a4:9f", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f09c892-03", "ovs_interfaceid": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.219009] env[63538]: DEBUG oslo_concurrency.lockutils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.483153] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.579469] env[63538]: DEBUG oslo_concurrency.lockutils [req-5f20cbc5-06a6-44f4-8ea0-46fdfb308fbc req-776f5316-ae17-417a-9315-fb47680576f7 service nova] Releasing lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.598089] env[63538]: DEBUG oslo_concurrency.lockutils [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.598478] env[63538]: DEBUG oslo_concurrency.lockutils [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1169.598590] env[63538]: DEBUG oslo_concurrency.lockutils [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1169.602216] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57638de-a73e-407b-90d2-67a4b7a05e60 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.613895] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f207757e-f4db-4e81-9c53-e9e90dea8522 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.651092] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc28908d-b00b-4d40-995e-308be5064324 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.659680] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3e83c7-04eb-4510-9323-5851729c1c69 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.675021] env[63538]: DEBUG nova.compute.provider_tree [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.683538] env[63538]: DEBUG oslo_concurrency.lockutils [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.178031] env[63538]: DEBUG nova.scheduler.client.report [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1170.235606] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Acquiring lock "aaf52cad-86fd-42df-8ee3-13724e3f5e94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.235877] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Lock "aaf52cad-86fd-42df-8ee3-13724e3f5e94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.236116] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Acquiring lock "aaf52cad-86fd-42df-8ee3-13724e3f5e94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.236314] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Lock "aaf52cad-86fd-42df-8ee3-13724e3f5e94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.236489] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Lock "aaf52cad-86fd-42df-8ee3-13724e3f5e94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1170.238933] env[63538]: INFO nova.compute.manager [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Terminating instance [ 1170.241246] env[63538]: DEBUG nova.compute.manager [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1170.241582] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-373c21d7-a962-4e3e-98ab-8d4d142cae0a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.252295] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaaf8632-4a89-4e9a-b397-6c9c6cfeb94b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.287113] env[63538]: WARNING nova.virt.vmwareapi.driver [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance aaf52cad-86fd-42df-8ee3-13724e3f5e94 could not be found. [ 1170.287348] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1170.287688] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cae34552-6f3d-4df7-885d-4580986341c5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.298223] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9745f133-7f9b-4746-9b16-6d80d50b4bef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.332702] env[63538]: WARNING nova.virt.vmwareapi.vmops [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aaf52cad-86fd-42df-8ee3-13724e3f5e94 could not be found. [ 1170.333019] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1170.333122] env[63538]: INFO nova.compute.manager [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Took 0.09 seconds to destroy the instance on the hypervisor. [ 1170.333388] env[63538]: DEBUG oslo.service.loopingcall [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1170.333670] env[63538]: DEBUG nova.compute.manager [-] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1170.333771] env[63538]: DEBUG nova.network.neutron [-] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1170.639294] env[63538]: DEBUG oslo_concurrency.lockutils [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.639566] env[63538]: DEBUG oslo_concurrency.lockutils [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.639750] env[63538]: DEBUG nova.network.neutron [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1170.683067] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.770s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1170.686260] env[63538]: DEBUG oslo_concurrency.lockutils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.468s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.688472] env[63538]: INFO nova.compute.claims [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1170.715681] env[63538]: INFO nova.scheduler.client.report [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Deleted allocations for instance 4387a3ec-0f0b-4917-97f3-08c737bee4e7 [ 1170.847053] env[63538]: DEBUG nova.compute.manager [req-6e41c11c-bf04-4c9c-8645-7958a6eb315c req-11c24755-64df-4a0f-8734-dfe954b36628 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Received event network-vif-deleted-3e8852b6-74d6-48df-920f-ee0169a7772e {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1170.847053] env[63538]: INFO nova.compute.manager 
[req-6e41c11c-bf04-4c9c-8645-7958a6eb315c req-11c24755-64df-4a0f-8734-dfe954b36628 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Neutron deleted interface 3e8852b6-74d6-48df-920f-ee0169a7772e; detaching it from the instance and deleting it from the info cache [ 1170.847053] env[63538]: DEBUG nova.network.neutron [req-6e41c11c-bf04-4c9c-8645-7958a6eb315c req-11c24755-64df-4a0f-8734-dfe954b36628 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.226185] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a8aace11-0e85-45b7-9435-0c15d835bc4a tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "4387a3ec-0f0b-4917-97f3-08c737bee4e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.904s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.325042] env[63538]: DEBUG nova.network.neutron [-] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.349929] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2df6becf-2f45-4a78-a2ad-a6bf63fdf467 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.363529] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf60688-5216-4bcb-bf47-a65920edc936 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.222599] env[63538]: INFO nova.compute.manager [-] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Took 1.89 seconds to deallocate network for instance. [ 1172.224264] env[63538]: DEBUG nova.compute.manager [req-6e41c11c-bf04-4c9c-8645-7958a6eb315c req-11c24755-64df-4a0f-8734-dfe954b36628 service nova] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Detach interface failed, port_id=3e8852b6-74d6-48df-920f-ee0169a7772e, reason: Instance aaf52cad-86fd-42df-8ee3-13724e3f5e94 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1172.407941] env[63538]: DEBUG nova.network.neutron [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance_info_cache with network_info: [{"id": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "address": "fa:16:3e:e0:99:90", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b86754-97", "ovs_interfaceid": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.454554] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e849c13e-ed95-4638-98b6-49a8eb761f09 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.463727] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21aaf81d-b7e1-4d26-96f7-1a422bfb6f9a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.496772] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d642b9e6-5ce5-4c9a-be56-a2980fbecd7c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.506635] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7214735c-3533-40db-b856-abd618099b27 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.525339] env[63538]: DEBUG nova.compute.provider_tree [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.734242] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "42af31f3-a9d0-4fdd-99fa-442ebe915277" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.734562] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "42af31f3-a9d0-4fdd-99fa-442ebe915277" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1172.734820] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "42af31f3-a9d0-4fdd-99fa-442ebe915277-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.736785] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "42af31f3-a9d0-4fdd-99fa-442ebe915277-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.002s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1172.737079] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "42af31f3-a9d0-4fdd-99fa-442ebe915277-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1172.739725] env[63538]: INFO nova.compute.manager [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Terminating instance [ 1172.742092] env[63538]: DEBUG nova.compute.manager [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1172.742343] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1172.743356] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1fe9d2-f604-4d0a-9019-ab0dd1c3e83b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.752685] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1172.753114] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-296ca5cd-0db6-4c69-b476-27b13f82572e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.761120] env[63538]: DEBUG oslo_vmware.api [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1172.761120] env[63538]: value = "task-5101824" [ 1172.761120] env[63538]: _type = "Task" [ 1172.761120] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.771987] env[63538]: DEBUG oslo_vmware.api [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101824, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.817451] env[63538]: INFO nova.compute.manager [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Took 0.59 seconds to detach 1 volumes for instance. 
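The PowerOffVM_Task entries above follow the usual oslo.vmware pattern: invoke the asynchronous vSphere task method, then block in wait_for_task() while _poll_task logs progress. A minimal sketch of that pattern, with placeholder connection details that are not taken from this log:

    # Sketch only: driving a vCenter task such as PowerOffVM_Task to completion
    # the way the wait_for_task/_poll_task entries above show.
    from oslo_vmware import api

    # Placeholder host/credentials; create_session=False avoids logging in at
    # construction time so the object can be built without a live vCenter.
    session = api.VMwareAPISession('vc.example.test', 'admin', 'secret',
                                   api_retry_count=3, task_poll_interval=0.5,
                                   create_session=False)

    def power_off(vm_ref):
        # Invoke the asynchronous vSphere call, then poll the returned Task
        # until it reaches 'success' (wait_for_task raises on 'error').
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task)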
[ 1172.819674] env[63538]: DEBUG nova.compute.manager [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Deleting volume: f66d7fd1-7450-49dd-aedf-14fdf30f2e90 {{(pid=63538) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1172.915466] env[63538]: DEBUG oslo_concurrency.lockutils [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1173.032145] env[63538]: DEBUG nova.scheduler.client.report [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1173.272656] env[63538]: DEBUG oslo_vmware.api [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101824, 'name': PowerOffVM_Task, 'duration_secs': 0.288046} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.272964] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1173.273160] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1173.273432] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38644f77-c600-4975-a03e-4de5efed7646 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.348920] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1173.349192] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1173.349475] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Deleting the datastore file [datastore1] 42af31f3-a9d0-4fdd-99fa-442ebe915277 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1173.349760] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7c4fe90-29ad-4625-845b-1e086718a8bf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.358370] env[63538]: DEBUG oslo_vmware.api [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for the task: (returnval){ [ 1173.358370] env[63538]: value = "task-5101827" [ 1173.358370] env[63538]: _type = "Task" [ 1173.358370] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.367802] env[63538]: DEBUG oslo_vmware.api [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101827, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.381195] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.443737] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa18868-8d82-4073-8f1c-a1e6757e4cb5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.465015] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53e1f26-e65d-450e-bb45-672496a68952 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.473127] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance 'e0d5a3b2-21e1-4de0-ac10-1a5687a60c10' progress to 83 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1173.538605] env[63538]: DEBUG oslo_concurrency.lockutils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.852s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.539233] env[63538]: DEBUG nova.compute.manager [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1173.542245] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.059s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.543728] env[63538]: INFO nova.compute.claims [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1173.854129] env[63538]: DEBUG oslo_vmware.rw_handles [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bd865b-caa5-7fc4-55dc-7c1d9aafde5a/disk-0.vmdk. 
{{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1173.855304] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179298f3-ffa7-41e8-aeac-1ba0fe76d8fc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.864896] env[63538]: DEBUG oslo_vmware.rw_handles [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bd865b-caa5-7fc4-55dc-7c1d9aafde5a/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1173.865128] env[63538]: ERROR oslo_vmware.rw_handles [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bd865b-caa5-7fc4-55dc-7c1d9aafde5a/disk-0.vmdk due to incomplete transfer. [ 1173.865763] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5c7274ca-6c10-4967-9e09-ae65611c5771 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.871166] env[63538]: DEBUG oslo_vmware.api [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Task: {'id': task-5101827, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177478} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.871305] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1173.871460] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1173.871563] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1173.871746] env[63538]: INFO nova.compute.manager [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Took 1.13 seconds to destroy the instance on the hypervisor. 
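The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" DEBUG lines in this stretch are emitted from oslo.service's loopingcall module, which appears to wrap the network deallocation in a retry helper. A rough sketch of that retry idiom, using an illustrative function rather than Nova's real one:

    # Sketch of the oslo.service retry idiom behind the
    # "Waiting for function ... to return" DEBUG lines; the decorated function
    # here is illustrative, not Nova's _deallocate_network_with_retries.
    from oslo_service import loopingcall

    class TransientNetworkError(Exception):
        pass

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=5,
                                exceptions=(TransientNetworkError,))
    def deallocate_network():
        # Retried (with growing sleeps) whenever it raises one of the
        # exceptions listed above; any other exception propagates at once.
        print("trying to deallocate")

    deallocate_network()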
[ 1173.872031] env[63538]: DEBUG oslo.service.loopingcall [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1173.872197] env[63538]: DEBUG nova.compute.manager [-] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1173.872295] env[63538]: DEBUG nova.network.neutron [-] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1173.875438] env[63538]: DEBUG oslo_vmware.rw_handles [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bd865b-caa5-7fc4-55dc-7c1d9aafde5a/disk-0.vmdk. {{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1173.875632] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Uploaded image fc39aa85-5f54-44b2-83e7-fcf99170aec7 to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1173.878339] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1173.878603] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-163fcb43-6d8b-4a8c-984b-334431725b4d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.885384] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1173.885384] env[63538]: value = "task-5101828" [ 1173.885384] env[63538]: _type = "Task" [ 1173.885384] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.894526] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101828, 'name': Destroy_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.979974] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1173.980324] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5d2c860-a16d-4231-9575-0676cc8ebc3a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.987933] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1173.987933] env[63538]: value = "task-5101829" [ 1173.987933] env[63538]: _type = "Task" [ 1173.987933] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.998071] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101829, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.049085] env[63538]: DEBUG nova.compute.utils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1174.052744] env[63538]: DEBUG nova.compute.manager [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Not allocating networking since 'none' was specified. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1174.379066] env[63538]: DEBUG nova.compute.manager [req-effd9ecf-c82e-4d5a-9095-d711f5075508 req-9cdfe671-eb73-40a1-94ad-15f7aa6e9da9 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Received event network-vif-deleted-dfaa4640-ae2a-444b-aa92-e24dd9eca692 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1174.379406] env[63538]: INFO nova.compute.manager [req-effd9ecf-c82e-4d5a-9095-d711f5075508 req-9cdfe671-eb73-40a1-94ad-15f7aa6e9da9 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Neutron deleted interface dfaa4640-ae2a-444b-aa92-e24dd9eca692; detaching it from the instance and deleting it from the info cache [ 1174.379685] env[63538]: DEBUG nova.network.neutron [req-effd9ecf-c82e-4d5a-9095-d711f5075508 req-9cdfe671-eb73-40a1-94ad-15f7aa6e9da9 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.400441] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101828, 'name': Destroy_Task, 'duration_secs': 0.505245} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.401213] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Destroyed the VM [ 1174.401813] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1174.402479] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cd1477f6-1bcf-4c48-ac9a-415656f51e2b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.412575] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1174.412575] env[63538]: value = "task-5101830" [ 1174.412575] env[63538]: _type = "Task" [ 1174.412575] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.427418] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101830, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.502145] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101829, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.554231] env[63538]: DEBUG nova.compute.manager [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1174.722817] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2d9de1-91e1-4540-9e64-d71ddd5f6732 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.734226] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a07afad-550d-41d6-b2e8-5480a8b1add9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.770134] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3498ac29-e7ec-4e24-bb2d-e302a4caf967 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.778921] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b244e58-c9c9-4b95-a244-006744bd8eae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.800156] env[63538]: DEBUG nova.compute.provider_tree [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1174.850325] env[63538]: DEBUG nova.network.neutron [-] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.883411] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3378605-b13c-4189-8bc7-01786521818c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.895042] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f758b34-5de9-4e0c-8f2a-622c07a57380 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.937714] env[63538]: DEBUG nova.compute.manager [req-effd9ecf-c82e-4d5a-9095-d711f5075508 req-9cdfe671-eb73-40a1-94ad-15f7aa6e9da9 service nova] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Detach interface failed, port_id=dfaa4640-ae2a-444b-aa92-e24dd9eca692, reason: Instance 42af31f3-a9d0-4fdd-99fa-442ebe915277 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1174.945627] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101830, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.999367] env[63538]: DEBUG oslo_vmware.api [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101829, 'name': PowerOnVM_Task, 'duration_secs': 0.538409} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.999686] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1174.999881] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-39bfa1a9-a712-4418-9e47-d6725ed447e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance 'e0d5a3b2-21e1-4de0-ac10-1a5687a60c10' progress to 100 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1175.302264] env[63538]: DEBUG nova.scheduler.client.report [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1175.353100] env[63538]: INFO nova.compute.manager [-] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Took 1.48 seconds to deallocate network for instance. [ 1175.428709] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101830, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.567755] env[63538]: DEBUG nova.compute.manager [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1175.610104] env[63538]: DEBUG nova.virt.hardware [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1175.610520] env[63538]: DEBUG nova.virt.hardware [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1175.610790] env[63538]: DEBUG nova.virt.hardware [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1175.611195] env[63538]: DEBUG nova.virt.hardware [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1175.611397] env[63538]: DEBUG nova.virt.hardware [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1175.611694] env[63538]: DEBUG nova.virt.hardware [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1175.612063] env[63538]: DEBUG nova.virt.hardware [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1175.612337] env[63538]: DEBUG nova.virt.hardware [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1175.612641] env[63538]: DEBUG nova.virt.hardware [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd 
tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1175.612928] env[63538]: DEBUG nova.virt.hardware [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1175.613280] env[63538]: DEBUG nova.virt.hardware [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1175.614514] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37e3bc7-91c0-43e9-b8db-7e020b386946 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.626353] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708f9491-2d26-4f49-9789-be3c1cab601f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.643330] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1175.649991] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Creating folder: Project (affa555448014b6aaf78be0467181790). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1175.650451] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ab9e17d-414f-44c4-bdc1-1d8cc0fdf168 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.662639] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Created folder: Project (affa555448014b6aaf78be0467181790) in parent group-v992234. [ 1175.662818] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Creating folder: Instances. Parent ref: group-v992537. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1175.664168] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35fe1629-1f8c-4173-94b9-bd07d6faafbf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.677027] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Created folder: Instances in parent group-v992537. 
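The repeated "Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635" entries show the scheduler report client comparing the locally built inventory against what it last recorded before deciding whether to push an update to placement. A small sketch of that comparison, using the exact inventory payload from the log; inventory_changed() is an illustrative helper, not Nova's actual method:

    # Sketch: comparing a freshly built inventory dict against the one already
    # recorded for the provider, as the "Inventory has not changed" lines imply.
    # The values below are copied from the log entries above.
    reported = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def inventory_changed(old, new):
        # Plain dict equality is sufficient here: both sides key on resource
        # class and carry the same numeric fields.
        return old != new

    assert not inventory_changed(reported, dict(reported))  # nothing to update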
[ 1175.677027] env[63538]: DEBUG oslo.service.loopingcall [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1175.677266] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1175.677422] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3ebdfb1-2998-4cf4-80a1-0ca8b26d5176 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.696955] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1175.696955] env[63538]: value = "task-5101833" [ 1175.696955] env[63538]: _type = "Task" [ 1175.696955] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.707638] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101833, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.810525] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.268s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.811069] env[63538]: DEBUG nova.compute.manager [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1175.813634] env[63538]: DEBUG oslo_concurrency.lockutils [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.131s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.862902] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.928049] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101830, 'name': RemoveSnapshot_Task, 'duration_secs': 1.129259} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.931335] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1175.931655] env[63538]: DEBUG nova.compute.manager [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1175.933468] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e212a51-be4f-4e15-8e62-e37b8ef1333f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.983948] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db8c998-3523-4ff3-b21c-a1c697e5c97a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.993815] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9796ff-d810-4cf9-9f2c-f72fe08cf2c1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.040821] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc242873-faf8-4d0e-a3ca-98769ad311f0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.052941] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e80288a-8317-440f-8c68-c3b1417e83ad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.076607] env[63538]: DEBUG nova.compute.provider_tree [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1176.208360] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101833, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.319221] env[63538]: DEBUG nova.compute.utils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1176.323309] env[63538]: DEBUG nova.compute.manager [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Not allocating networking since 'none' was specified. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1176.452839] env[63538]: INFO nova.compute.manager [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Shelve offloading [ 1176.453446] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1176.453727] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8922728-d85c-4570-9203-5f75613cd078 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.461690] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1176.461690] env[63538]: value = "task-5101834" [ 1176.461690] env[63538]: _type = "Task" [ 1176.461690] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.472707] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1176.472947] env[63538]: DEBUG nova.compute.manager [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1176.473777] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13746859-23a0-445b-976d-c7eb3bf6f33b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.483875] env[63538]: DEBUG oslo_concurrency.lockutils [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1176.484114] env[63538]: DEBUG oslo_concurrency.lockutils [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.484307] env[63538]: DEBUG nova.network.neutron [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Building network info cache for instance 
{{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1176.580971] env[63538]: DEBUG nova.scheduler.client.report [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1176.709456] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101833, 'name': CreateVM_Task, 'duration_secs': 0.517919} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.709654] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1176.710138] env[63538]: DEBUG oslo_concurrency.lockutils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1176.710357] env[63538]: DEBUG oslo_concurrency.lockutils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.710663] env[63538]: DEBUG oslo_concurrency.lockutils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1176.710922] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e24c508-6e7a-4683-9060-e6a8b9bb3881 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.716119] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1176.716119] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52617080-9c46-aca8-2c06-23730e48b4aa" [ 1176.716119] env[63538]: _type = "Task" [ 1176.716119] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.724493] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52617080-9c46-aca8-2c06-23730e48b4aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.824171] env[63538]: DEBUG nova.compute.manager [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1177.088966] env[63538]: DEBUG oslo_concurrency.lockutils [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.274s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.089500] env[63538]: INFO nova.compute.manager [None req-99ab706d-5b97-49b0-81f2-11b12f851034 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Successfully reverted task state from rebuilding on failure for instance. [ 1177.096553] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.715s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.096934] env[63538]: DEBUG nova.objects.instance [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Lazy-loading 'resources' on Instance uuid aaf52cad-86fd-42df-8ee3-13724e3f5e94 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1177.230135] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52617080-9c46-aca8-2c06-23730e48b4aa, 'name': SearchDatastore_Task, 'duration_secs': 0.013759} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.230509] env[63538]: DEBUG oslo_concurrency.lockutils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1177.230900] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1177.231084] env[63538]: DEBUG oslo_concurrency.lockutils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1177.231204] env[63538]: DEBUG oslo_concurrency.lockutils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.231390] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1177.234278] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-631c8c0a-c2c1-492c-8bb0-c95234155b45 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.243040] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1177.243201] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1177.243896] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fcef50e-c762-477e-83ca-bce4dff57bfc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.251980] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1177.251980] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526a524b-5074-b634-8807-d3a0a66f333a" [ 1177.251980] env[63538]: _type = "Task" [ 1177.251980] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.263097] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526a524b-5074-b634-8807-d3a0a66f333a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.295676] env[63538]: DEBUG nova.network.neutron [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Updating instance_info_cache with network_info: [{"id": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "address": "fa:16:3e:ce:c8:c1", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap319cec1c-49", "ovs_interfaceid": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.297814] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Acquiring lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.298178] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 
tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.753795] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77337571-feea-4110-82cd-4da3153c4479 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.768900] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d14837-863b-4b2e-af73-6fa750d7a2ff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.771941] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526a524b-5074-b634-8807-d3a0a66f333a, 'name': SearchDatastore_Task, 'duration_secs': 0.01102} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.773099] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9d1cd4e-080d-42c0-8a5e-39518a1220b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.803336] env[63538]: DEBUG nova.compute.manager [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1177.806191] env[63538]: DEBUG oslo_concurrency.lockutils [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1177.809782] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b063f65-3e80-4b87-957d-630335d710c2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.813607] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1177.813607] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522f1d57-1e42-272c-08d5-93100a2f9044" [ 1177.813607] env[63538]: _type = "Task" [ 1177.813607] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.820458] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a233e0-63bc-42cc-b9ed-d5ea576fd117 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.830606] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522f1d57-1e42-272c-08d5-93100a2f9044, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.834320] env[63538]: DEBUG nova.compute.manager [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1177.846507] env[63538]: DEBUG nova.compute.provider_tree [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.870509] env[63538]: DEBUG nova.virt.hardware [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=<?>,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-12-12T12:48:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1177.870788] env[63538]: DEBUG nova.virt.hardware [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1177.870938] env[63538]: DEBUG nova.virt.hardware [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1177.871467] env[63538]: DEBUG nova.virt.hardware [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1177.871467] 
env[63538]: DEBUG nova.virt.hardware [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1177.871467] env[63538]: DEBUG nova.virt.hardware [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1177.871635] env[63538]: DEBUG nova.virt.hardware [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1177.871799] env[63538]: DEBUG nova.virt.hardware [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1177.871968] env[63538]: DEBUG nova.virt.hardware [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1177.872155] env[63538]: DEBUG nova.virt.hardware [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1177.872336] env[63538]: DEBUG nova.virt.hardware [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1177.873565] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa41f6b-e60d-4c99-a104-d233ae69fada {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.883066] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e17fe1f-ec5d-4d0a-8fde-2b6e0389626f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.897790] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1177.903420] env[63538]: DEBUG oslo.service.loopingcall [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1177.905995] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1177.906258] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-558e3dfa-bd6c-4c34-8198-50d1bc8f3de3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.922444] env[63538]: DEBUG nova.network.neutron [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Port c8b86754-970c-4f8a-b3fb-ec8fb42d3863 binding to destination host cpu-1 is already ACTIVE {{(pid=63538) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1177.922648] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1177.922755] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.922915] env[63538]: DEBUG nova.network.neutron [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1177.925301] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1177.925301] env[63538]: value = "task-5101835" [ 1177.925301] env[63538]: _type = "Task" [ 1177.925301] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.934520] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101835, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.181628] env[63538]: DEBUG nova.compute.manager [req-2bd20b4d-036c-4257-a655-7f9f2a863512 req-528c289c-0843-46b9-a9e0-6be36d4b2a8d service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Received event network-vif-unplugged-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1178.181867] env[63538]: DEBUG oslo_concurrency.lockutils [req-2bd20b4d-036c-4257-a655-7f9f2a863512 req-528c289c-0843-46b9-a9e0-6be36d4b2a8d service nova] Acquiring lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.182096] env[63538]: DEBUG oslo_concurrency.lockutils [req-2bd20b4d-036c-4257-a655-7f9f2a863512 req-528c289c-0843-46b9-a9e0-6be36d4b2a8d service nova] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.182279] env[63538]: DEBUG oslo_concurrency.lockutils [req-2bd20b4d-036c-4257-a655-7f9f2a863512 req-528c289c-0843-46b9-a9e0-6be36d4b2a8d service nova] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.182454] env[63538]: DEBUG nova.compute.manager [req-2bd20b4d-036c-4257-a655-7f9f2a863512 req-528c289c-0843-46b9-a9e0-6be36d4b2a8d service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] No waiting events found dispatching network-vif-unplugged-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1178.182628] env[63538]: WARNING nova.compute.manager [req-2bd20b4d-036c-4257-a655-7f9f2a863512 req-528c289c-0843-46b9-a9e0-6be36d4b2a8d service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Received unexpected event network-vif-unplugged-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1178.245038] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1178.246078] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55686140-7fe1-4934-9b0d-86aaa6b74655 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.255797] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1178.256171] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18065456-20d6-4937-b81d-da7dbced6e0f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.329997] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522f1d57-1e42-272c-08d5-93100a2f9044, 'name': SearchDatastore_Task, 'duration_secs': 0.012717} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.330549] env[63538]: DEBUG oslo_concurrency.lockutils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.330665] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 6a8de8d2-aa15-4057-a936-57cad9c8b1d0/6a8de8d2-aa15-4057-a936-57cad9c8b1d0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1178.331018] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fea1d308-b542-4053-b2f2-d0732d157997 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.336893] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1178.337271] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: 
d00151c1-ca34-4c57-9ed2-74d506a0cffb] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1178.337489] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleting the datastore file [datastore1] d00151c1-ca34-4c57-9ed2-74d506a0cffb {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1178.338776] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.339160] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21888f51-a19e-4a39-89c2-3de9dfd63464 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.343598] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1178.343598] env[63538]: value = "task-5101837" [ 1178.343598] env[63538]: _type = "Task" [ 1178.343598] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.350662] env[63538]: DEBUG nova.scheduler.client.report [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1178.354094] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1178.354094] env[63538]: value = "task-5101838" [ 1178.354094] env[63538]: _type = "Task" [ 1178.354094] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.362826] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101837, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.369942] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101838, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.443200] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101835, 'name': CreateVM_Task, 'duration_secs': 0.420742} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.443388] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1178.445135] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.445343] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.445736] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1178.446454] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a15fcf28-9d2c-4132-a5b5-4a820077cb50 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.455768] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1178.455768] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52eaff7c-3db2-b461-672f-2eb3f4a80e67" [ 1178.455768] env[63538]: _type = "Task" [ 1178.455768] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.467664] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52eaff7c-3db2-b461-672f-2eb3f4a80e67, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.854674] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101837, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.855637] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.759s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.858168] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.996s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.858594] env[63538]: DEBUG nova.objects.instance [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lazy-loading 'resources' on Instance uuid 42af31f3-a9d0-4fdd-99fa-442ebe915277 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1178.872204] env[63538]: DEBUG oslo_vmware.api [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101838, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211913} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.879052] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1178.879344] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1178.879541] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1178.907504] env[63538]: INFO nova.scheduler.client.report [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleted allocations for instance d00151c1-ca34-4c57-9ed2-74d506a0cffb [ 1178.971556] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52eaff7c-3db2-b461-672f-2eb3f4a80e67, 'name': SearchDatastore_Task, 'duration_secs': 0.032173} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.971885] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.972361] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1178.972718] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.972783] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.972957] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1178.973256] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-609ddf26-0580-4629-b481-8e2c36f18a21 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.987926] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1178.988109] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1178.989204] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2f9e94f-7700-4504-8931-2b3b9093480d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.995784] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1178.995784] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52adf42b-0ba3-57e9-6171-41f7f8b00a58" [ 1178.995784] env[63538]: _type = "Task" [ 1178.995784] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.008151] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52adf42b-0ba3-57e9-6171-41f7f8b00a58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.048593] env[63538]: DEBUG nova.network.neutron [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance_info_cache with network_info: [{"id": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "address": "fa:16:3e:e0:99:90", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b86754-97", "ovs_interfaceid": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.099627] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.099858] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63538) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.100015] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Starting heal instance info cache {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 1179.354300] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101837, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.588727} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.354574] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 6a8de8d2-aa15-4057-a936-57cad9c8b1d0/6a8de8d2-aa15-4057-a936-57cad9c8b1d0.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1179.354810] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1179.355132] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-711d5a74-d483-4942-b740-0dff1bf81f33 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.366656] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1179.366656] env[63538]: value = "task-5101839" [ 1179.366656] env[63538]: _type = "Task" [ 1179.366656] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.375366] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101839, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.393236] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ce164d0b-b604-4abc-8749-8dd242e8cca3 tempest-ServerActionsV293TestJSON-1816572243 tempest-ServerActionsV293TestJSON-1816572243-project-member] Lock "aaf52cad-86fd-42df-8ee3-13724e3f5e94" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 9.157s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.417392] env[63538]: DEBUG oslo_concurrency.lockutils [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.508090] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52adf42b-0ba3-57e9-6171-41f7f8b00a58, 'name': SearchDatastore_Task, 'duration_secs': 0.011433} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.510202] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c631bc-449e-4220-b09a-762d35829258 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.513469] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-407c025a-2cb0-4073-bdcd-2360a363155b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.522796] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1179.522796] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c15a09-6bce-1fbb-c7c7-9b04283d0dcc" [ 1179.522796] env[63538]: _type = "Task" [ 1179.522796] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.524111] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd9b8e7-73ed-420e-b003-30de35e4348a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.535802] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c15a09-6bce-1fbb-c7c7-9b04283d0dcc, 'name': SearchDatastore_Task, 'duration_secs': 0.010894} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.559275] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1179.559600] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] d91a140b-6ca9-4c0e-b433-795d2014975c/d91a140b-6ca9-4c0e-b433-795d2014975c.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1179.560476] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1179.561983] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49cbe188-7e11-4a78-8fe9-2dff19828ffe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.564508] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1b6e34-1fa7-4c47-965f-edbfa9810784 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.574513] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873063ae-7283-4d7a-91c2-fdcca49791c5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.578553] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1179.578553] env[63538]: value = "task-5101840" [ 1179.578553] env[63538]: _type = "Task" [ 1179.578553] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.590462] env[63538]: DEBUG nova.compute.provider_tree [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1179.597599] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101840, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.880345] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101839, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069357} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.880814] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1179.882116] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50de384-9445-44b7-9d67-da727c6d85f3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.922646] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 6a8de8d2-aa15-4057-a936-57cad9c8b1d0/6a8de8d2-aa15-4057-a936-57cad9c8b1d0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1179.923418] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed6546e4-24bb-4e40-96eb-c0ce7d4c678d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.948446] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1179.948446] env[63538]: value = "task-5101841" [ 1179.948446] env[63538]: _type = "Task" [ 1179.948446] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.961142] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101841, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.069479] env[63538]: DEBUG nova.compute.manager [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63538) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1180.069717] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.090231] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101840, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500642} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.090481] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] d91a140b-6ca9-4c0e-b433-795d2014975c/d91a140b-6ca9-4c0e-b433-795d2014975c.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1180.090698] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1180.090976] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e69e0f9-7dd7-4d36-baba-1fdb51c0e04e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.094290] env[63538]: DEBUG nova.scheduler.client.report [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1180.103915] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1180.103915] env[63538]: value = "task-5101842" [ 
1180.103915] env[63538]: _type = "Task" [ 1180.103915] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.117713] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101842, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.129650] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1180.129820] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquired lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.130048] env[63538]: DEBUG nova.network.neutron [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Forcefully refreshing network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1180.209619] env[63538]: DEBUG nova.compute.manager [req-38b92f3e-d2b0-451e-9b8e-fa181c32eea0 req-78cc93fa-3710-423b-a376-7ac481ae33d7 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Received event network-changed-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1180.209785] env[63538]: DEBUG nova.compute.manager [req-38b92f3e-d2b0-451e-9b8e-fa181c32eea0 req-78cc93fa-3710-423b-a376-7ac481ae33d7 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Refreshing instance network info cache due to event network-changed-319cec1c-49eb-43a4-a9ec-6b74a507b6d6. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1180.210008] env[63538]: DEBUG oslo_concurrency.lockutils [req-38b92f3e-d2b0-451e-9b8e-fa181c32eea0 req-78cc93fa-3710-423b-a376-7ac481ae33d7 service nova] Acquiring lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1180.210169] env[63538]: DEBUG oslo_concurrency.lockutils [req-38b92f3e-d2b0-451e-9b8e-fa181c32eea0 req-78cc93fa-3710-423b-a376-7ac481ae33d7 service nova] Acquired lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.210336] env[63538]: DEBUG nova.network.neutron [req-38b92f3e-d2b0-451e-9b8e-fa181c32eea0 req-78cc93fa-3710-423b-a376-7ac481ae33d7 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Refreshing network info cache for port 319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1180.458960] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101841, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.600017] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.742s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.602485] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.264s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.603991] env[63538]: INFO nova.compute.claims [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1180.617861] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101842, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073556} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.618362] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1180.619335] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc609170-d6cc-4a1b-9f7d-485fd9cdedd6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.644188] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] d91a140b-6ca9-4c0e-b433-795d2014975c/d91a140b-6ca9-4c0e-b433-795d2014975c.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1180.645341] env[63538]: INFO nova.scheduler.client.report [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Deleted allocations for instance 42af31f3-a9d0-4fdd-99fa-442ebe915277 [ 1180.646339] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-010a6576-4b66-4fb3-8ced-57aee1ed569d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.671296] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] 
Waiting for the task: (returnval){ [ 1180.671296] env[63538]: value = "task-5101843" [ 1180.671296] env[63538]: _type = "Task" [ 1180.671296] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.680718] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101843, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.681394] env[63538]: DEBUG nova.network.neutron [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1180.964509] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101841, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.059965] env[63538]: DEBUG nova.network.neutron [req-38b92f3e-d2b0-451e-9b8e-fa181c32eea0 req-78cc93fa-3710-423b-a376-7ac481ae33d7 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Updated VIF entry in instance network info cache for port 319cec1c-49eb-43a4-a9ec-6b74a507b6d6. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1181.059965] env[63538]: DEBUG nova.network.neutron [req-38b92f3e-d2b0-451e-9b8e-fa181c32eea0 req-78cc93fa-3710-423b-a376-7ac481ae33d7 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Updating instance_info_cache with network_info: [{"id": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "address": "fa:16:3e:ce:c8:c1", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap319cec1c-49", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.170035] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5cb2aef8-42cc-4c4a-a9f6-338aac34f6b6 tempest-AttachInterfacesTestJSON-1766979869 tempest-AttachInterfacesTestJSON-1766979869-project-member] Lock "42af31f3-a9d0-4fdd-99fa-442ebe915277" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.435s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.185554] env[63538]: DEBUG oslo_vmware.api [None 
req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101843, 'name': ReconfigVM_Task, 'duration_secs': 0.307968} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.186844] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Reconfigured VM instance instance-00000073 to attach disk [datastore1] d91a140b-6ca9-4c0e-b433-795d2014975c/d91a140b-6ca9-4c0e-b433-795d2014975c.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1181.187810] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3d38afa-f4b7-430a-bd02-3398fdf98299 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.196592] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1181.196592] env[63538]: value = "task-5101844" [ 1181.196592] env[63538]: _type = "Task" [ 1181.196592] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.207349] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101844, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.311635] env[63538]: DEBUG nova.network.neutron [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.462630] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101841, 'name': ReconfigVM_Task, 'duration_secs': 1.04254} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.462969] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 6a8de8d2-aa15-4057-a936-57cad9c8b1d0/6a8de8d2-aa15-4057-a936-57cad9c8b1d0.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1181.463684] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9926d78f-91da-4d38-a22b-95378f651bf1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.470949] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1181.470949] env[63538]: value = "task-5101845" [ 1181.470949] env[63538]: _type = "Task" [ 1181.470949] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.480086] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101845, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.562160] env[63538]: DEBUG oslo_concurrency.lockutils [req-38b92f3e-d2b0-451e-9b8e-fa181c32eea0 req-78cc93fa-3710-423b-a376-7ac481ae33d7 service nova] Releasing lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1181.706706] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101844, 'name': Rename_Task, 'duration_secs': 0.161178} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.707161] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1181.707435] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f27e14e-133f-4629-8d92-88c2bcb2d228 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.717188] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1181.717188] env[63538]: value = "task-5101846" [ 1181.717188] env[63538]: _type = "Task" [ 1181.717188] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.726609] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101846, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.760306] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7de149f-2ad8-4c78-8c90-23002986e720 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.767806] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fc312b-9905-4cf1-b1bc-92b208502eaa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.804432] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d164503-7138-47b6-bcee-a43c73f1d71a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.814395] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a5b1a5-0007-48fb-bdbd-a17c38866931 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.818459] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Releasing lock "refresh_cache-42af31f3-a9d0-4fdd-99fa-442ebe915277" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1181.818653] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Updated the network info_cache for instance {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10088}} [ 1181.819122] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.819742] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.819942] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.820154] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.820323] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63538) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.820470] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.830725] env[63538]: DEBUG nova.compute.provider_tree [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.982949] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101845, 'name': Rename_Task, 'duration_secs': 0.220425} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.982949] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1181.982949] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db8b9d50-401b-4bc2-887b-061e16cee516 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.989381] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1181.989381] env[63538]: value = "task-5101847" [ 1181.989381] env[63538]: _type = "Task" [ 1181.989381] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.998180] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101847, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.228069] env[63538]: DEBUG oslo_vmware.api [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101846, 'name': PowerOnVM_Task, 'duration_secs': 0.498179} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.228339] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1182.228578] env[63538]: INFO nova.compute.manager [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Took 4.39 seconds to spawn the instance on the hypervisor. [ 1182.228769] env[63538]: DEBUG nova.compute.manager [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1182.229596] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17201ee-f639-4ee3-8539-c561d5c241c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.303981] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.336108] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Getting list of instances from cluster (obj){ [ 1182.336108] env[63538]: value = "domain-c8" [ 1182.336108] env[63538]: _type = "ClusterComputeResource" [ 1182.336108] env[63538]: } {{(pid=63538) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1182.337037] env[63538]: DEBUG nova.scheduler.client.report [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1182.344982] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a8ea53-c9c0-4306-b44a-cb3c4cfbec28 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.368522] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Got total of 7 instances {{(pid=63538) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1182.368848] env[63538]: WARNING nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] 
While synchronizing instance power states, found 9 instances in the database and 7 instances on the hypervisor. [ 1182.369096] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid fb26fb32-a420-4667-850c-e32786edd8f2 {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 1182.369456] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid 048573b4-26db-4a62-81e0-1bc1c3999d02 {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 1182.369760] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid d00151c1-ca34-4c57-9ed2-74d506a0cffb {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 1182.370054] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid e0d5a3b2-21e1-4de0-ac10-1a5687a60c10 {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 1182.370343] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid e3feec17-ca1b-4873-bb0a-370c3868aabf {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 1182.370629] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid f0183c1f-4557-45fd-ba65-4821ef661173 {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 1182.370913] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid 6a8de8d2-aa15-4057-a936-57cad9c8b1d0 {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 1182.371224] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid d91a140b-6ca9-4c0e-b433-795d2014975c {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 1182.371479] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Triggering sync for uuid 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a {{(pid=63538) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10427}} [ 1182.372433] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "fb26fb32-a420-4667-850c-e32786edd8f2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.372788] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "fb26fb32-a420-4667-850c-e32786edd8f2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.373248] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "048573b4-26db-4a62-81e0-1bc1c3999d02" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.373563] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.373907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.374192] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.374472] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.374821] env[63538]: INFO nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] During sync_power_state the instance has a pending task (resize_reverting). Skip. 
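Annotation: the _sync_power_states entries above (and the lock releases that follow below) trace a simple pattern: compare the instances the database knows about against what the hypervisor reports, warn on a mismatch, then reconcile each instance under its own named lock, skipping any instance that still has a task in flight ("pending task ... Skip."). The sketch below illustrates that pattern only; it is not Nova's implementation, and db_instances, driver_instance_uuids and sync_one are hypothetical stand-ins.

import logging
from oslo_concurrency import lockutils

LOG = logging.getLogger(__name__)

def sync_power_states(db_instances, driver_instance_uuids, sync_one):
    # db_instances: objects with .uuid and .task_state; sync_one: callable
    # that queries the driver and reconciles a single instance (hypothetical).
    db_by_uuid = {inst.uuid: inst for inst in db_instances}
    if len(db_by_uuid) != len(driver_instance_uuids):
        LOG.warning("While synchronizing instance power states, found %d "
                    "instances in the database and %d instances on the "
                    "hypervisor.", len(db_by_uuid), len(driver_instance_uuids))
    for uuid, inst in db_by_uuid.items():
        # One lock per instance UUID, mirroring the Acquiring/acquired/released
        # lines in the log; other workers touching the same instance wait here.
        with lockutils.lock(uuid):
            if inst.task_state is not None:
                LOG.info("During sync_power_state the instance has a pending "
                         "task (%s). Skip.", inst.task_state)
                continue
            sync_one(inst)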
[ 1182.375028] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.375364] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.375703] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.376039] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "f0183c1f-4557-45fd-ba65-4821ef661173" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.376380] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "f0183c1f-4557-45fd-ba65-4821ef661173" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.376755] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "6a8de8d2-aa15-4057-a936-57cad9c8b1d0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.377061] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "d91a140b-6ca9-4c0e-b433-795d2014975c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.377300] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.377501] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1182.377632] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63538) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10636}} [ 1182.378406] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8858d6c1-3b3f-4ae5-8ec9-1889ad725c91 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.381820] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8f7c2e-d0e3-4dbc-8bd7-6ead7c1a738b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.385068] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84cb0b23-7547-45e7-a27e-7588f953f530 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.388219] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eaf8f77-12df-4f7a-8760-b2562337527d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.390639] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1182.503993] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101847, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.746605] env[63538]: INFO nova.compute.manager [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Took 13.28 seconds to build instance. [ 1182.849366] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.247s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.850177] env[63538]: DEBUG nova.compute.manager [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1182.854122] env[63538]: DEBUG oslo_concurrency.lockutils [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.437s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.854492] env[63538]: DEBUG nova.objects.instance [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lazy-loading 'resources' on Instance uuid d00151c1-ca34-4c57-9ed2-74d506a0cffb {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1182.896915] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.906852] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.533s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.909581] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.534s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.909915] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "f0183c1f-4557-45fd-ba65-4821ef661173" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.534s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.910261] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "fb26fb32-a420-4667-850c-e32786edd8f2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.538s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.001410] env[63538]: DEBUG oslo_vmware.api [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101847, 'name': PowerOnVM_Task, 'duration_secs': 0.528802} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.001796] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1183.001917] env[63538]: INFO nova.compute.manager [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Took 7.43 seconds to spawn the instance on the hypervisor. [ 1183.002130] env[63538]: DEBUG nova.compute.manager [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1183.003007] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b30232-cd90-494e-bbf9-65e486015b5e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.249872] env[63538]: DEBUG oslo_concurrency.lockutils [None req-74421bff-dd3d-4fb4-a914-f6403d635124 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "d91a140b-6ca9-4c0e-b433-795d2014975c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.798s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.250359] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "d91a140b-6ca9-4c0e-b433-795d2014975c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.873s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.250579] env[63538]: INFO nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1183.251065] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "d91a140b-6ca9-4c0e-b433-795d2014975c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.358289] env[63538]: DEBUG nova.compute.utils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1183.359982] env[63538]: DEBUG nova.objects.instance [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lazy-loading 'numa_topology' on Instance uuid d00151c1-ca34-4c57-9ed2-74d506a0cffb {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1183.361280] env[63538]: DEBUG nova.compute.manager [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1183.361453] env[63538]: DEBUG nova.network.neutron [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1183.425043] env[63538]: DEBUG nova.policy [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '196d913d27f8451ea0de484df737c7c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c381e05a12ae4cd4b83e21927e5d0a36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1183.521149] env[63538]: INFO nova.compute.manager [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Took 14.33 seconds to build instance. 
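Annotation: the spawn that just finished ("Took 14.33 seconds to build instance") is driven by the repeated Task: {'id': task-...} / "progress is N%" entries, i.e. a poll-until-complete loop around each vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task in sequence). A minimal sketch of that polling pattern is below; it illustrates the idea rather than reproducing oslo.vmware's wait_for_task/_poll_task, and get_task_info is a hypothetical callable standing in for the vCenter TaskInfo lookup.

import time

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    # Poll a vCenter-style task until it reaches a terminal state.
    while True:
        info = get_task_info(task_ref)        # e.g. returns state and progress
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # 'queued' / 'running': try again, as the "progress is N%" lines show.
        time.sleep(poll_interval)

# The spawn flow in the log is then just a chain of such waits:
#   CopyVirtualDisk_Task -> ExtendVirtualDisk_Task -> ReconfigVM_Task
#   -> Rename_Task -> PowerOnVM_Task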
[ 1183.765846] env[63538]: DEBUG nova.network.neutron [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Successfully created port: 8e0bc120-3db7-47f2-924e-a7cb1aad608b {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1183.770929] env[63538]: INFO nova.compute.manager [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Rebuilding instance [ 1183.832240] env[63538]: DEBUG nova.compute.manager [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1183.832917] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e306677-0dfa-4cde-b19f-700d76b4b4e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.865279] env[63538]: DEBUG nova.compute.manager [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1183.866457] env[63538]: DEBUG nova.objects.base [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1184.024032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-08ffdce9-bec1-4678-8dd9-e3ccebe936bd tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "6a8de8d2-aa15-4057-a936-57cad9c8b1d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.838s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.024390] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "6a8de8d2-aa15-4057-a936-57cad9c8b1d0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.648s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.024597] env[63538]: INFO nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] During sync_power_state the instance has a pending task (spawning). Skip. 
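Annotation: the "Start building networks asynchronously" / "Allocating IP information in the background" / "Start building block device mappings" entries above describe overlapping work: port allocation is kicked off in the background while block device mappings are prepared, and the builder only blocks on the network result when it is needed for spawning. The sketch below shows that pattern with a thread pool purely for illustration (Nova itself uses green threads); allocate_for_instance and build_block_device_mappings are hypothetical stand-ins for the real Nova/Neutron calls.

from concurrent.futures import ThreadPoolExecutor

def build_resources(instance, allocate_for_instance, build_block_device_mappings):
    with ThreadPoolExecutor(max_workers=1) as pool:
        # Started asynchronously, like "Allocating IP information in the
        # background." in the log.
        nw_future = pool.submit(allocate_for_instance, instance)
        bdms = build_block_device_mappings(instance)  # proceeds in parallel
        network_info = nw_future.result()             # join before spawning
    return network_info, bdms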
[ 1184.024790] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "6a8de8d2-aa15-4057-a936-57cad9c8b1d0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.030950] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55934fd-d54b-46ad-8595-72c1b89aa45a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.039765] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbd12e3-4ef7-407b-b568-d9c10219a861 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.073863] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd01062-85ab-4290-bec0-bc7cf5fbf858 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.082541] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41e2a6b-4f50-4ce5-aa9b-d8fb30450af9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.097497] env[63538]: DEBUG nova.compute.provider_tree [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1184.345900] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1184.346262] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f38992f-4ca2-4706-b713-c7f824bcd2ad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.355030] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1184.355030] env[63538]: value = "task-5101848" [ 1184.355030] env[63538]: _type = "Task" [ 1184.355030] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.366267] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101848, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.601208] env[63538]: DEBUG nova.scheduler.client.report [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1184.865688] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101848, 'name': PowerOffVM_Task, 'duration_secs': 0.133732} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.865688] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1184.865953] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1184.866539] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c144e5-3fe3-43b4-9f17-0269d82f5d3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.873884] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1184.874953] env[63538]: DEBUG nova.compute.manager [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1184.876907] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6638fbc0-da39-48b0-9114-0b37522a7cb5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.901435] env[63538]: DEBUG nova.virt.hardware [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1184.901800] env[63538]: DEBUG nova.virt.hardware [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1184.902050] env[63538]: DEBUG nova.virt.hardware [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1184.902288] env[63538]: DEBUG nova.virt.hardware [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1184.902495] env[63538]: DEBUG nova.virt.hardware [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1184.902688] env[63538]: DEBUG nova.virt.hardware [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1184.902943] env[63538]: DEBUG nova.virt.hardware [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1184.903177] env[63538]: DEBUG nova.virt.hardware [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 
tempest-ServerTagsTestJSON-2008059827-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1184.903441] env[63538]: DEBUG nova.virt.hardware [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1184.903596] env[63538]: DEBUG nova.virt.hardware [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1184.903814] env[63538]: DEBUG nova.virt.hardware [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1184.904858] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c501184-52ba-498f-9131-a0f88fbe3136 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.909223] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1184.909415] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1184.909602] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Deleting the datastore file [datastore1] d91a140b-6ca9-4c0e-b433-795d2014975c {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1184.910352] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34d0e2d2-05bd-4510-993a-8b7f6e2b44f1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.916367] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98716ff-974d-499c-a61a-8a1ef8785423 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.922314] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1184.922314] env[63538]: value = "task-5101850" [ 1184.922314] env[63538]: _type = "Task" [ 1184.922314] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.940946] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101850, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.107928] env[63538]: DEBUG oslo_concurrency.lockutils [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.254s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.112081] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 5.042s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.141024] env[63538]: DEBUG oslo_concurrency.lockutils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquiring lock "df85b1e1-0319-4619-8680-73bb5d413595" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.141324] env[63538]: DEBUG oslo_concurrency.lockutils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Lock "df85b1e1-0319-4619-8680-73bb5d413595" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.262127] env[63538]: DEBUG nova.compute.manager [req-6e5ad37c-dfd3-44eb-bb3d-c8c8ecd84b0a req-2dd7b812-f64c-454b-8199-f33f4d7b9065 service nova] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Received event network-vif-plugged-8e0bc120-3db7-47f2-924e-a7cb1aad608b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1185.262484] env[63538]: DEBUG oslo_concurrency.lockutils [req-6e5ad37c-dfd3-44eb-bb3d-c8c8ecd84b0a req-2dd7b812-f64c-454b-8199-f33f4d7b9065 service nova] Acquiring lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.262837] env[63538]: DEBUG oslo_concurrency.lockutils [req-6e5ad37c-dfd3-44eb-bb3d-c8c8ecd84b0a req-2dd7b812-f64c-454b-8199-f33f4d7b9065 service nova] Lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.263134] env[63538]: DEBUG oslo_concurrency.lockutils [req-6e5ad37c-dfd3-44eb-bb3d-c8c8ecd84b0a 
req-2dd7b812-f64c-454b-8199-f33f4d7b9065 service nova] Lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.263411] env[63538]: DEBUG nova.compute.manager [req-6e5ad37c-dfd3-44eb-bb3d-c8c8ecd84b0a req-2dd7b812-f64c-454b-8199-f33f4d7b9065 service nova] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] No waiting events found dispatching network-vif-plugged-8e0bc120-3db7-47f2-924e-a7cb1aad608b {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1185.263664] env[63538]: WARNING nova.compute.manager [req-6e5ad37c-dfd3-44eb-bb3d-c8c8ecd84b0a req-2dd7b812-f64c-454b-8199-f33f4d7b9065 service nova] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Received unexpected event network-vif-plugged-8e0bc120-3db7-47f2-924e-a7cb1aad608b for instance with vm_state building and task_state spawning. [ 1185.356235] env[63538]: DEBUG nova.network.neutron [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Successfully updated port: 8e0bc120-3db7-47f2-924e-a7cb1aad608b {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1185.433567] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101850, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123421} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.433567] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1185.433567] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1185.433567] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1185.615456] env[63538]: DEBUG nova.objects.instance [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lazy-loading 'migration_context' on Instance uuid e0d5a3b2-21e1-4de0-ac10-1a5687a60c10 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1185.622900] env[63538]: DEBUG oslo_concurrency.lockutils [None req-11cd79b7-8364-4b9f-bf58-227f44f4f8eb tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 
26.117s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.625973] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 3.320s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.625973] env[63538]: INFO nova.compute.manager [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Unshelving [ 1185.643956] env[63538]: DEBUG nova.compute.manager [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1185.862603] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Acquiring lock "refresh_cache-58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1185.862842] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Acquired lock "refresh_cache-58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.863052] env[63538]: DEBUG nova.network.neutron [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1186.165265] env[63538]: DEBUG oslo_concurrency.lockutils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.275066] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba150d1-1143-4140-a3bd-e427678f2f35 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.283069] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cff0c55-6d81-4cb6-8018-7bfaff601184 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.315511] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d61516-cef4-42b2-8401-29ff51c63d5f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.324179] 
env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf9069e-da7b-4616-8176-57b10af70d4b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.338185] env[63538]: DEBUG nova.compute.provider_tree [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1186.397174] env[63538]: DEBUG nova.network.neutron [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1186.468907] env[63538]: DEBUG nova.virt.hardware [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1186.469178] env[63538]: DEBUG nova.virt.hardware [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1186.469350] env[63538]: DEBUG nova.virt.hardware [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1186.469541] env[63538]: DEBUG nova.virt.hardware [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1186.469697] env[63538]: DEBUG nova.virt.hardware [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1186.469853] env[63538]: DEBUG nova.virt.hardware [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1186.470136] env[63538]: DEBUG nova.virt.hardware [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1186.470314] env[63538]: DEBUG nova.virt.hardware [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1186.470493] env[63538]: DEBUG nova.virt.hardware [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1186.470662] env[63538]: DEBUG nova.virt.hardware [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1186.470858] env[63538]: DEBUG nova.virt.hardware [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1186.471759] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb347cd-d6cf-42e6-a0f6-530a97c90ec1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.482882] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c27dfb-26c0-44d2-9ad1-e4a70287409c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.498122] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1186.504206] env[63538]: DEBUG oslo.service.loopingcall [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1186.504504] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1186.504730] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc950894-6fa3-41e9-8611-9dfa047c42de {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.523408] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1186.523408] env[63538]: value = "task-5101851" [ 1186.523408] env[63538]: _type = "Task" [ 1186.523408] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.534594] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101851, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.592369] env[63538]: DEBUG nova.network.neutron [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Updating instance_info_cache with network_info: [{"id": "8e0bc120-3db7-47f2-924e-a7cb1aad608b", "address": "fa:16:3e:c0:1a:49", "network": {"id": "a10914eb-dde5-4fdf-b871-87c0d85cc457", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1120825976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c381e05a12ae4cd4b83e21927e5d0a36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8fedd232-bfc1-4e7f-bd5e-c43ef8f2f08a", "external-id": "nsx-vlan-transportzone-925", "segmentation_id": 925, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e0bc120-3d", "ovs_interfaceid": "8e0bc120-3db7-47f2-924e-a7cb1aad608b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.651259] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.841855] env[63538]: DEBUG nova.scheduler.client.report [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1187.039283] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101851, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.095205] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Releasing lock "refresh_cache-58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1187.095673] env[63538]: DEBUG nova.compute.manager [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Instance network_info: |[{"id": "8e0bc120-3db7-47f2-924e-a7cb1aad608b", "address": "fa:16:3e:c0:1a:49", "network": {"id": "a10914eb-dde5-4fdf-b871-87c0d85cc457", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1120825976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c381e05a12ae4cd4b83e21927e5d0a36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8fedd232-bfc1-4e7f-bd5e-c43ef8f2f08a", "external-id": "nsx-vlan-transportzone-925", "segmentation_id": 925, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e0bc120-3d", "ovs_interfaceid": "8e0bc120-3db7-47f2-924e-a7cb1aad608b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1187.096161] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:1a:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8fedd232-bfc1-4e7f-bd5e-c43ef8f2f08a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e0bc120-3db7-47f2-924e-a7cb1aad608b', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1187.104450] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Creating folder: Project (c381e05a12ae4cd4b83e21927e5d0a36). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1187.104789] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c06e6e38-1a46-4f40-8919-65ee5a282f4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.118382] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Created folder: Project (c381e05a12ae4cd4b83e21927e5d0a36) in parent group-v992234. [ 1187.118620] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Creating folder: Instances. Parent ref: group-v992542. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1187.118962] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0156153a-dffe-43ae-954e-70cec0fee953 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.130442] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Created folder: Instances in parent group-v992542. [ 1187.130724] env[63538]: DEBUG oslo.service.loopingcall [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1187.130949] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1187.132031] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-914522a8-7fae-4a25-93d1-eb47c484fc81 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.152108] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1187.152108] env[63538]: value = "task-5101854" [ 1187.152108] env[63538]: _type = "Task" [ 1187.152108] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.160713] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101854, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.288703] env[63538]: DEBUG nova.compute.manager [req-8aef4950-53d3-4361-b268-34a9243d1feb req-8e68cd03-0b54-48e4-84d6-d2f96ab7093f service nova] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Received event network-changed-8e0bc120-3db7-47f2-924e-a7cb1aad608b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1187.289098] env[63538]: DEBUG nova.compute.manager [req-8aef4950-53d3-4361-b268-34a9243d1feb req-8e68cd03-0b54-48e4-84d6-d2f96ab7093f service nova] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Refreshing instance network info cache due to event network-changed-8e0bc120-3db7-47f2-924e-a7cb1aad608b. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1187.289403] env[63538]: DEBUG oslo_concurrency.lockutils [req-8aef4950-53d3-4361-b268-34a9243d1feb req-8e68cd03-0b54-48e4-84d6-d2f96ab7093f service nova] Acquiring lock "refresh_cache-58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.289613] env[63538]: DEBUG oslo_concurrency.lockutils [req-8aef4950-53d3-4361-b268-34a9243d1feb req-8e68cd03-0b54-48e4-84d6-d2f96ab7093f service nova] Acquired lock "refresh_cache-58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.289888] env[63538]: DEBUG nova.network.neutron [req-8aef4950-53d3-4361-b268-34a9243d1feb req-8e68cd03-0b54-48e4-84d6-d2f96ab7093f service nova] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Refreshing network info cache for port 8e0bc120-3db7-47f2-924e-a7cb1aad608b {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1187.535405] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101851, 'name': CreateVM_Task, 'duration_secs': 0.535279} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.535603] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1187.535970] env[63538]: DEBUG oslo_concurrency.lockutils [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.536264] env[63538]: DEBUG oslo_concurrency.lockutils [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.536679] env[63538]: DEBUG oslo_concurrency.lockutils [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1187.537039] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59c75779-399c-479a-9348-99b5bdccf3b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.542906] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1187.542906] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e17985-ec64-ac55-53ac-2b767f3d61cb" [ 1187.542906] env[63538]: _type = "Task" [ 1187.542906] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.552151] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e17985-ec64-ac55-53ac-2b767f3d61cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.663751] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101854, 'name': CreateVM_Task, 'duration_secs': 0.391434} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.663935] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1187.671764] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.852822] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.741s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.858632] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.962s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.858819] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.858977] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63538) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1187.859381] env[63538]: DEBUG oslo_concurrency.lockutils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.694s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.861522] env[63538]: INFO nova.compute.claims [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1187.867204] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20a4712-40f6-4709-be95-65b1d8175d80 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.884707] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f8c200-ffcf-46c3-814e-a13e013ac419 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.909620] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5e9e03-1f8d-4204-8362-5f8fb2aed4f2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.918788] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5fecdd1-5103-40a7-8c67-d6e0c3180b1d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.952382] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179258MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=63538) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1187.952611] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.054613] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e17985-ec64-ac55-53ac-2b767f3d61cb, 'name': SearchDatastore_Task, 'duration_secs': 0.01054} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.054613] env[63538]: DEBUG oslo_concurrency.lockutils [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.054895] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1188.054990] env[63538]: DEBUG oslo_concurrency.lockutils [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1188.055164] env[63538]: DEBUG oslo_concurrency.lockutils [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.055350] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1188.055638] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.055959] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1188.056295] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6c349f3-afee-49ab-bb20-f0db7443be8a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.058066] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-326fd05d-ea5a-493a-8907-6f82cc35a095 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.064549] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 
tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Waiting for the task: (returnval){ [ 1188.064549] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5244028a-d253-4922-4867-d15aa0eb74bb" [ 1188.064549] env[63538]: _type = "Task" [ 1188.064549] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.068891] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1188.069121] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1188.070228] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7be65fa-6af0-4f19-adcd-1f910c2d8ea5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.075722] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5244028a-d253-4922-4867-d15aa0eb74bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.079892] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1188.079892] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52593723-3d65-52e0-60dc-97672030c4d3" [ 1188.079892] env[63538]: _type = "Task" [ 1188.079892] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.088659] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52593723-3d65-52e0-60dc-97672030c4d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.089544] env[63538]: DEBUG nova.network.neutron [req-8aef4950-53d3-4361-b268-34a9243d1feb req-8e68cd03-0b54-48e4-84d6-d2f96ab7093f service nova] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Updated VIF entry in instance network info cache for port 8e0bc120-3db7-47f2-924e-a7cb1aad608b. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1188.089884] env[63538]: DEBUG nova.network.neutron [req-8aef4950-53d3-4361-b268-34a9243d1feb req-8e68cd03-0b54-48e4-84d6-d2f96ab7093f service nova] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Updating instance_info_cache with network_info: [{"id": "8e0bc120-3db7-47f2-924e-a7cb1aad608b", "address": "fa:16:3e:c0:1a:49", "network": {"id": "a10914eb-dde5-4fdf-b871-87c0d85cc457", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1120825976-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c381e05a12ae4cd4b83e21927e5d0a36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8fedd232-bfc1-4e7f-bd5e-c43ef8f2f08a", "external-id": "nsx-vlan-transportzone-925", "segmentation_id": 925, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e0bc120-3d", "ovs_interfaceid": "8e0bc120-3db7-47f2-924e-a7cb1aad608b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.576057] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5244028a-d253-4922-4867-d15aa0eb74bb, 'name': SearchDatastore_Task, 'duration_secs': 0.019862} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.576439] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.576663] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1188.576878] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1188.590243] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52593723-3d65-52e0-60dc-97672030c4d3, 'name': SearchDatastore_Task, 'duration_secs': 0.00975} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.591037] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6afe4264-2dea-4938-9fc7-41be91119431 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.593558] env[63538]: DEBUG oslo_concurrency.lockutils [req-8aef4950-53d3-4361-b268-34a9243d1feb req-8e68cd03-0b54-48e4-84d6-d2f96ab7093f service nova] Releasing lock "refresh_cache-58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.597925] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1188.597925] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52df1c09-f9a8-a1ca-1011-bd57114def7d" [ 1188.597925] env[63538]: _type = "Task" [ 1188.597925] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.606507] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52df1c09-f9a8-a1ca-1011-bd57114def7d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.088848] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223e05f6-5b80-4aa5-841a-e7be94c9ac27 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.107493] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3615fe-fb98-4a78-a77c-df42c86301cb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.122561] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52df1c09-f9a8-a1ca-1011-bd57114def7d, 'name': SearchDatastore_Task, 'duration_secs': 0.012904} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.163299] env[63538]: DEBUG oslo_concurrency.lockutils [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.163790] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] d91a140b-6ca9-4c0e-b433-795d2014975c/d91a140b-6ca9-4c0e-b433-795d2014975c.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1189.164656] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.165035] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1189.165404] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1c03983-300d-41fd-b882-cdbd5b4a4380 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.169739] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1a5498-8cdf-414a-b3ee-8cd8003e58c4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.174389] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d318800a-c5b5-45b9-a0ce-b9a88ce0c820 
{{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.187200] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11bf6e0f-14cf-4285-8a3c-bef13aafb56a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.194595] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1189.194595] env[63538]: value = "task-5101855" [ 1189.194595] env[63538]: _type = "Task" [ 1189.194595] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.196630] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1189.196978] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1189.203282] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4178593e-ecda-4cf8-bfee-355a6ce665ac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.220819] env[63538]: DEBUG nova.compute.provider_tree [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.229725] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101855, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.231350] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Waiting for the task: (returnval){ [ 1189.231350] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5215501b-b931-3eb9-e0ff-dc6d88f530f2" [ 1189.231350] env[63538]: _type = "Task" [ 1189.231350] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.242594] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5215501b-b931-3eb9-e0ff-dc6d88f530f2, 'name': SearchDatastore_Task, 'duration_secs': 0.012541} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.243519] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9d75e89-5061-483e-b6a1-5edc068b4813 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.250051] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Waiting for the task: (returnval){ [ 1189.250051] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5220195f-efd0-9561-6fbb-828442e330fe" [ 1189.250051] env[63538]: _type = "Task" [ 1189.250051] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.259531] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5220195f-efd0-9561-6fbb-828442e330fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.409918] env[63538]: INFO nova.compute.manager [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Swapping old allocation on dict_keys(['f65218a4-1d3d-476a-9093-01cae92c8635']) held by migration 29520b30-b1da-45d3-b4d7-f7d1bd5d790a for instance [ 1189.434728] env[63538]: DEBUG nova.scheduler.client.report [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Overwriting current allocation {'allocations': {'f65218a4-1d3d-476a-9093-01cae92c8635': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 163}}, 'project_id': '0d6954a5254f441ca256c85330297cef', 'user_id': 'cd6de144ccc4498aa90ae01ca7a0f6f1', 'consumer_generation': 1} on consumer e0d5a3b2-21e1-4de0-ac10-1a5687a60c10 {{(pid=63538) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1189.541137] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1189.541346] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.541646] env[63538]: DEBUG nova.network.neutron [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1189.706592] env[63538]: DEBUG oslo_vmware.api [None 
req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101855, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.724937] env[63538]: DEBUG nova.scheduler.client.report [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1189.765720] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5220195f-efd0-9561-6fbb-828442e330fe, 'name': SearchDatastore_Task, 'duration_secs': 0.021187} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.766472] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.766789] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a/58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1189.767208] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41b39f16-9967-4dd7-9dfc-f7d5a3621f0e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.778055] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Waiting for the task: (returnval){ [ 1189.778055] env[63538]: value = "task-5101856" [ 1189.778055] env[63538]: _type = "Task" [ 1189.778055] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.787956] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101856, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.210262] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101855, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.835607} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.210511] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] d91a140b-6ca9-4c0e-b433-795d2014975c/d91a140b-6ca9-4c0e-b433-795d2014975c.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1190.210734] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1190.211021] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35027b77-271e-4c68-9ca2-f6a67f7630f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.219806] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1190.219806] env[63538]: value = "task-5101857" [ 1190.219806] env[63538]: _type = "Task" [ 1190.219806] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.231697] env[63538]: DEBUG oslo_concurrency.lockutils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.232335] env[63538]: DEBUG nova.compute.manager [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1190.235225] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101857, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.235834] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.585s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.236105] env[63538]: DEBUG nova.objects.instance [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lazy-loading 'pci_requests' on Instance uuid d00151c1-ca34-4c57-9ed2-74d506a0cffb {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.293994] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101856, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.414185] env[63538]: DEBUG nova.network.neutron [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance_info_cache with network_info: [{"id": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "address": "fa:16:3e:e0:99:90", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8b86754-97", "ovs_interfaceid": "c8b86754-970c-4f8a-b3fb-ec8fb42d3863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.730355] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101857, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.195639} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.730809] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1190.731410] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194a72ef-2cdb-49c8-9fc6-9ef22d357df6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.744309] env[63538]: DEBUG nova.compute.utils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1190.747062] env[63538]: DEBUG nova.objects.instance [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lazy-loading 'numa_topology' on Instance uuid d00151c1-ca34-4c57-9ed2-74d506a0cffb {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.756496] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] d91a140b-6ca9-4c0e-b433-795d2014975c/d91a140b-6ca9-4c0e-b433-795d2014975c.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1190.757951] env[63538]: INFO nova.compute.claims [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1190.760488] env[63538]: DEBUG nova.compute.manager [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1190.762944] env[63538]: DEBUG nova.compute.manager [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Not allocating networking since 'none' was specified. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1190.763150] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c130122c-3402-463e-9bbd-7619c8800538 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.786066] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1190.786066] env[63538]: value = "task-5101858" [ 1190.786066] env[63538]: _type = "Task" [ 1190.786066] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.792838] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101856, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.862514} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.794465] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a/58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1190.794768] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1190.795323] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee830424-f709-42e1-ad14-4f4493d27047 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.802659] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101858, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.807759] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Waiting for the task: (returnval){ [ 1190.807759] env[63538]: value = "task-5101859" [ 1190.807759] env[63538]: _type = "Task" [ 1190.807759] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.818201] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101859, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.917783] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1190.918475] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1190.918849] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f7143d5-3cc6-41dd-ae88-8de20501a14b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.926792] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1190.926792] env[63538]: value = "task-5101860" [ 1190.926792] env[63538]: _type = "Task" [ 1190.926792] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.936511] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101860, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.297594] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101858, 'name': ReconfigVM_Task, 'duration_secs': 0.407428} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.297594] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Reconfigured VM instance instance-00000073 to attach disk [datastore1] d91a140b-6ca9-4c0e-b433-795d2014975c/d91a140b-6ca9-4c0e-b433-795d2014975c.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1191.298175] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-00affaef-8396-4765-976d-d1471368159b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.305411] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1191.305411] env[63538]: value = "task-5101861" [ 1191.305411] env[63538]: _type = "Task" [ 1191.305411] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.317608] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101861, 'name': Rename_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.320838] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101859, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077641} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.321113] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1191.321896] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2beebfda-25dd-48aa-82d6-2876ae71cac8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.348040] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a/58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1191.348433] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-203ed5f5-8d27-4a9e-bb44-d9a62624f566 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.368806] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Waiting for the task: (returnval){ [ 1191.368806] env[63538]: value = "task-5101862" [ 1191.368806] env[63538]: _type = "Task" [ 1191.368806] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.377773] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101862, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.438621] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101860, 'name': PowerOffVM_Task, 'duration_secs': 0.227415} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.439627] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1191.440416] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1191.440689] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1191.440839] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1191.441292] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1191.441461] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1191.441700] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1191.441964] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1191.442187] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 
tempest-ServerActionsTestJSON-1096356934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1191.442401] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1191.442605] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1191.442968] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1191.448092] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df07e238-c1bf-4daf-927b-0c21d8429106 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.464026] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1191.464026] env[63538]: value = "task-5101863" [ 1191.464026] env[63538]: _type = "Task" [ 1191.464026] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.475197] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101863, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.786354] env[63538]: DEBUG nova.compute.manager [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1191.821620] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101861, 'name': Rename_Task, 'duration_secs': 0.160407} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.823928] env[63538]: DEBUG nova.virt.hardware [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1191.824205] env[63538]: DEBUG nova.virt.hardware [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1191.824375] env[63538]: DEBUG nova.virt.hardware [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1191.824574] env[63538]: DEBUG nova.virt.hardware [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1191.824872] env[63538]: DEBUG nova.virt.hardware [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1191.825094] env[63538]: DEBUG nova.virt.hardware [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1191.825354] env[63538]: DEBUG nova.virt.hardware [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1191.825504] env[63538]: DEBUG nova.virt.hardware [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1191.825733] env[63538]: DEBUG nova.virt.hardware [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 
tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1191.825931] env[63538]: DEBUG nova.virt.hardware [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1191.826135] env[63538]: DEBUG nova.virt.hardware [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1191.826475] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1191.827349] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1568339-f481-486d-9452-cedf2d84e9f3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.832405] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8427c82e-f751-4a88-8252-54e6a7b6c656 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.842076] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cb2f67-3f54-4d84-b550-c9bdb52176c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.846449] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1191.846449] env[63538]: value = "task-5101864" [ 1191.846449] env[63538]: _type = "Task" [ 1191.846449] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.863777] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1191.870277] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Creating folder: Project (201d7771409c48b38f391f0ae8f29bbb). Parent ref: group-v992234. 
{{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1191.875259] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cdcb2066-5bbe-4def-b7db-db3d50a27681 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.886613] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101864, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.894525] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101862, 'name': ReconfigVM_Task, 'duration_secs': 0.322992} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.894992] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a/58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1191.895589] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a7c6842-c5a7-413b-b137-237649f8b9b1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.902714] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Waiting for the task: (returnval){ [ 1191.902714] env[63538]: value = "task-5101866" [ 1191.902714] env[63538]: _type = "Task" [ 1191.902714] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.910250] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Created folder: Project (201d7771409c48b38f391f0ae8f29bbb) in parent group-v992234. [ 1191.910493] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Creating folder: Instances. Parent ref: group-v992545. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1191.910757] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f03762e0-96a2-4002-a7a6-91f9fda2cab9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.915754] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101866, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.925755] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Created folder: Instances in parent group-v992545. [ 1191.926102] env[63538]: DEBUG oslo.service.loopingcall [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1191.926401] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1191.926656] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-498925ad-3a02-456c-90ad-f2e39552711e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.949038] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1191.949038] env[63538]: value = "task-5101868" [ 1191.949038] env[63538]: _type = "Task" [ 1191.949038] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.958890] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101868, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.977942] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101863, 'name': ReconfigVM_Task, 'duration_secs': 0.152185} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.979013] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4a17cc-f315-4203-b93c-489bc72c5f3a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.984470] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d5497b-5736-4d6e-8165-f43c3eb8e494 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.004785] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1192.005076] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1192.005291] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1192.005492] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1192.005795] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1192.005795] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1192.006017] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1192.006242] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1192.006436] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1192.006611] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1192.006790] env[63538]: DEBUG nova.virt.hardware [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1192.008261] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a42cb137-710b-4bcf-b325-d2d401b5582a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.014847] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5df3a8a-25a6-435a-8c9f-44387f67ba84 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.020231] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1192.020231] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d290b8-d2ff-90a5-8f9b-95ec88be276a" [ 1192.020231] env[63538]: _type = "Task" [ 1192.020231] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.052146] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ef162f-a822-4514-bab3-aee37d28aa5e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.058603] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d290b8-d2ff-90a5-8f9b-95ec88be276a, 'name': SearchDatastore_Task, 'duration_secs': 0.01108} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.064446] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1192.067168] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03234b27-22c0-4be5-b5ca-853fe7f630ab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.082103] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd16507-3f99-4f80-acf3-9539a7fec9e4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.103422] env[63538]: DEBUG nova.compute.provider_tree [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1192.108331] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1192.108331] env[63538]: value = "task-5101869" [ 1192.108331] env[63538]: _type = "Task" [ 1192.108331] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.118543] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101869, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.360477] env[63538]: DEBUG oslo_vmware.api [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101864, 'name': PowerOnVM_Task, 'duration_secs': 0.496501} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.360748] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1192.360963] env[63538]: DEBUG nova.compute.manager [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1192.361784] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6762a6-1649-4403-bc0f-0d034369d3bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.413616] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101866, 'name': Rename_Task, 'duration_secs': 0.269665} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.413886] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1192.414170] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-311be520-a08a-4df8-ac78-b3e688ec1030 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.420651] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Waiting for the task: (returnval){ [ 1192.420651] env[63538]: value = "task-5101870" [ 1192.420651] env[63538]: _type = "Task" [ 1192.420651] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.429357] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101870, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.461159] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101868, 'name': CreateVM_Task, 'duration_secs': 0.318629} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.461391] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1192.461775] env[63538]: DEBUG oslo_concurrency.lockutils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.461944] env[63538]: DEBUG oslo_concurrency.lockutils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.462320] env[63538]: DEBUG oslo_concurrency.lockutils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1192.462602] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a704686-9cde-44c2-adf3-43c35fcd5502 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.468667] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1192.468667] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5282e3f0-a0a6-58a2-8fb7-254aaf4b2d64" [ 1192.468667] env[63538]: _type = "Task" [ 1192.468667] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.478295] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5282e3f0-a0a6-58a2-8fb7-254aaf4b2d64, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.609791] env[63538]: DEBUG nova.scheduler.client.report [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1192.624877] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101869, 'name': ReconfigVM_Task, 'duration_secs': 0.233674} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.625366] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1192.626514] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd85e4c-7ef9-420d-ade9-f352d33757e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.655451] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] e0d5a3b2-21e1-4de0-ac10-1a5687a60c10/e0d5a3b2-21e1-4de0-ac10-1a5687a60c10.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1192.656300] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e79aee83-3765-48d3-804d-1b9dbca6d7a8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.675031] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1192.675031] env[63538]: value = "task-5101871" [ 1192.675031] env[63538]: _type = "Task" [ 1192.675031] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.684996] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101871, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.884571] env[63538]: DEBUG oslo_concurrency.lockutils [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.935602] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101870, 'name': PowerOnVM_Task} progress is 98%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.981603] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5282e3f0-a0a6-58a2-8fb7-254aaf4b2d64, 'name': SearchDatastore_Task, 'duration_secs': 0.012633} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.981930] env[63538]: DEBUG oslo_concurrency.lockutils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.982216] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1192.982463] env[63538]: DEBUG oslo_concurrency.lockutils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.982618] env[63538]: DEBUG oslo_concurrency.lockutils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.982804] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1192.983114] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ffa651a-0c22-4a4d-9342-3c387b327dc5 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.995390] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1192.995628] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1192.996509] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7fdeae4-a7f6-4dfd-88fd-671aa46104c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.003662] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1193.003662] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522f92cb-f487-7fba-1571-ce1d8cb9d362" [ 1193.003662] env[63538]: _type = "Task" [ 1193.003662] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.014048] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522f92cb-f487-7fba-1571-ce1d8cb9d362, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.119312] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.883s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.121644] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 5.169s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.168379] env[63538]: INFO nova.network.neutron [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Updating port 319cec1c-49eb-43a4-a9ec-6b74a507b6d6 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1193.189556] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101871, 'name': ReconfigVM_Task, 'duration_secs': 0.490021} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.190117] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Reconfigured VM instance instance-0000006b to attach disk [datastore2] e0d5a3b2-21e1-4de0-ac10-1a5687a60c10/e0d5a3b2-21e1-4de0-ac10-1a5687a60c10.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1193.191411] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035245ee-3833-4ea6-9a14-6e6fb4acb7aa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.223416] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6975694-70c6-44dd-911e-f195a7e025f3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.254100] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc8c6dd-af41-42a3-8381-1fd2e4c5c482 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.284183] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e65b40-10b2-482e-b65a-c9073b955fbc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.295279] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1193.295715] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7149c1ef-2f1d-47b6-852c-6bbd20057a02 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.305349] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1193.305349] env[63538]: value = "task-5101872" [ 1193.305349] env[63538]: _type = "Task" [ 1193.305349] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.317914] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101872, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.436548] env[63538]: DEBUG oslo_vmware.api [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101870, 'name': PowerOnVM_Task, 'duration_secs': 0.61412} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.436548] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1193.436548] env[63538]: INFO nova.compute.manager [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Took 8.56 seconds to spawn the instance on the hypervisor. [ 1193.436548] env[63538]: DEBUG nova.compute.manager [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1193.437858] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef48a4e4-941b-46be-9011-037d68d4c867 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.516887] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522f92cb-f487-7fba-1571-ce1d8cb9d362, 'name': SearchDatastore_Task, 'duration_secs': 0.026757} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.517809] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-337f3a7b-9551-460f-aaa5-83885f2a5d74 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.524363] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1193.524363] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e48875-3b7e-64a4-c6b1-2ecc0db5b44d" [ 1193.524363] env[63538]: _type = "Task" [ 1193.524363] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.534488] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e48875-3b7e-64a4-c6b1-2ecc0db5b44d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.817905] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101872, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.959073] env[63538]: INFO nova.compute.manager [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Took 15.65 seconds to build instance. [ 1193.982648] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "d91a140b-6ca9-4c0e-b433-795d2014975c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.982931] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "d91a140b-6ca9-4c0e-b433-795d2014975c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.983167] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "d91a140b-6ca9-4c0e-b433-795d2014975c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.983410] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "d91a140b-6ca9-4c0e-b433-795d2014975c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.983737] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "d91a140b-6ca9-4c0e-b433-795d2014975c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.986053] env[63538]: INFO nova.compute.manager [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Terminating instance [ 1193.987884] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "refresh_cache-d91a140b-6ca9-4c0e-b433-795d2014975c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1193.988049] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquired lock "refresh_cache-d91a140b-6ca9-4c0e-b433-795d2014975c" 
{{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.988225] env[63538]: DEBUG nova.network.neutron [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1194.035396] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e48875-3b7e-64a4-c6b1-2ecc0db5b44d, 'name': SearchDatastore_Task, 'duration_secs': 0.013256} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.036238] env[63538]: DEBUG oslo_concurrency.lockutils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1194.036238] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] df85b1e1-0319-4619-8680-73bb5d413595/df85b1e1-0319-4619-8680-73bb5d413595.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1194.036238] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-463ef891-9997-4763-a033-640749fd7d66 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.043947] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1194.043947] env[63538]: value = "task-5101873" [ 1194.043947] env[63538]: _type = "Task" [ 1194.043947] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.053284] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101873, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.161435] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance fb26fb32-a420-4667-850c-e32786edd8f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1194.161613] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 048573b4-26db-4a62-81e0-1bc1c3999d02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1194.161803] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance e3feec17-ca1b-4873-bb0a-370c3868aabf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1194.161969] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance f0183c1f-4557-45fd-ba65-4821ef661173 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1194.162150] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 6a8de8d2-aa15-4057-a936-57cad9c8b1d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1194.162317] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance d91a140b-6ca9-4c0e-b433-795d2014975c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1194.162487] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1194.162651] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance d00151c1-ca34-4c57-9ed2-74d506a0cffb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1194.162799] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance df85b1e1-0319-4619-8680-73bb5d413595 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1194.162957] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance e0d5a3b2-21e1-4de0-ac10-1a5687a60c10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1194.163256] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1194.163470] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '10', 'num_vm_active': '6', 'num_task_None': '6', 'num_os_type_None': '10', 'num_proj_df090f9a727d4cf4a0f466e27928bdc6': '2', 'io_workload': '2', 'num_vm_shelved_offloaded': '1', 'num_task_spawning': '3', 'num_proj_1fe11c1386b14d139f4416cbf20fb201': '1', 'num_vm_resized': '1', 'num_task_resize_reverting': '1', 'num_proj_0d6954a5254f441ca256c85330297cef': '1', 'num_proj_9b1eba931f144b94b6e186dac1310dfa': '1', 'num_proj_1a06b7cc1ab24ba584bbe970e4fc5e81': '1', 'num_proj_affa555448014b6aaf78be0467181790': '2', 'num_vm_building': '2', 'num_proj_c381e05a12ae4cd4b83e21927e5d0a36': '1', 'num_proj_201d7771409c48b38f391f0ae8f29bbb': '1'} {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1194.320552] env[63538]: DEBUG oslo_vmware.api [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101872, 'name': PowerOnVM_Task, 'duration_secs': 0.878394} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.320834] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1194.328743] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a528196-a25e-4134-a113-5583d46e9dbb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.339338] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77fe660c-c3de-4611-8221-5162fe4cfdaa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.378668] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09614e4-e06d-4ff8-93c1-cbec6ce16306 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.388829] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abde1280-736c-4884-a042-a29284369da6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.405839] env[63538]: DEBUG nova.compute.provider_tree [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.461663] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ff7b8923-4b8f-40c9-93b1-619950a57415 tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.163s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.462068] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.085s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.463059] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef2cd5c-126c-40ee-b87b-52cf73b251a3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.508986] env[63538]: DEBUG nova.network.neutron [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1194.554239] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101873, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49289} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.555324] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] df85b1e1-0319-4619-8680-73bb5d413595/df85b1e1-0319-4619-8680-73bb5d413595.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1194.555324] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1194.555324] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29c67d62-c9fe-41a2-8938-5c3d8cbe5fbc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.563648] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1194.563648] env[63538]: value = "task-5101874" [ 1194.563648] env[63538]: _type = "Task" [ 1194.563648] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.574853] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101874, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.602492] env[63538]: DEBUG nova.network.neutron [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.712101] env[63538]: DEBUG nova.compute.manager [req-7ca662ae-f1c4-4ba6-b59f-28ceadd52ae8 req-4d655007-5b7f-430c-9c9f-ffad8375c955 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Received event network-vif-plugged-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1194.712472] env[63538]: DEBUG oslo_concurrency.lockutils [req-7ca662ae-f1c4-4ba6-b59f-28ceadd52ae8 req-4d655007-5b7f-430c-9c9f-ffad8375c955 service nova] Acquiring lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.713445] env[63538]: DEBUG oslo_concurrency.lockutils [req-7ca662ae-f1c4-4ba6-b59f-28ceadd52ae8 req-4d655007-5b7f-430c-9c9f-ffad8375c955 service nova] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.713445] env[63538]: DEBUG oslo_concurrency.lockutils [req-7ca662ae-f1c4-4ba6-b59f-28ceadd52ae8 req-4d655007-5b7f-430c-9c9f-ffad8375c955 service nova] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.713445] env[63538]: DEBUG nova.compute.manager [req-7ca662ae-f1c4-4ba6-b59f-28ceadd52ae8 req-4d655007-5b7f-430c-9c9f-ffad8375c955 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] No waiting events found dispatching network-vif-plugged-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1194.713445] env[63538]: WARNING nova.compute.manager [req-7ca662ae-f1c4-4ba6-b59f-28ceadd52ae8 req-4d655007-5b7f-430c-9c9f-ffad8375c955 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Received unexpected event network-vif-plugged-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1194.812539] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1194.812899] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.813248] env[63538]: DEBUG nova.network.neutron [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1194.909033] env[63538]: DEBUG nova.scheduler.client.report [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1194.975502] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.513s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.075701] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101874, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071237} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.076043] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1195.076840] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f28ead-3191-41c6-bad0-7d0fe2401f32 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.101484] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] df85b1e1-0319-4619-8680-73bb5d413595/df85b1e1-0319-4619-8680-73bb5d413595.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1195.101484] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82c08fa8-9ce8-40d1-b72e-f5bf35201585 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.114883] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Releasing lock "refresh_cache-d91a140b-6ca9-4c0e-b433-795d2014975c" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1195.115397] env[63538]: DEBUG nova.compute.manager [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1195.115625] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1195.116495] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19beeed0-9dc5-493e-96d9-9c263ed2796a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.125687] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1195.127377] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f069a5a-a085-49f0-8217-980e46ef1237 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.129279] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1195.129279] env[63538]: value = "task-5101875" [ 1195.129279] env[63538]: _type = "Task" [ 1195.129279] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.134769] env[63538]: DEBUG oslo_vmware.api [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1195.134769] env[63538]: value = "task-5101876" [ 1195.134769] env[63538]: _type = "Task" [ 1195.134769] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.142164] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101875, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.148919] env[63538]: DEBUG oslo_vmware.api [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101876, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.335631] env[63538]: INFO nova.compute.manager [None req-a0af2aaa-bfe6-418c-b880-34b22c8f6144 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance to original state: 'active' [ 1195.415333] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63538) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1195.415333] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.293s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.415333] env[63538]: DEBUG oslo_concurrency.lockutils [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.530s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.415333] env[63538]: DEBUG nova.objects.instance [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63538) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1195.594559] env[63538]: DEBUG nova.network.neutron [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Updating instance_info_cache with network_info: [{"id": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "address": "fa:16:3e:ce:c8:c1", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap319cec1c-49", "ovs_interfaceid": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1195.642165] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101875, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.648186] env[63538]: DEBUG oslo_vmware.api [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101876, 'name': PowerOffVM_Task, 'duration_secs': 0.279573} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.648644] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1195.648950] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1195.649311] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c0b62a3-5dfa-4645-9b9c-2172289eb40b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.678612] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1195.678937] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1195.679083] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Deleting the datastore file [datastore1] d91a140b-6ca9-4c0e-b433-795d2014975c {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1195.679373] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c44e9726-fe6c-4c11-b3ec-11af3bc7a43d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.686720] env[63538]: DEBUG oslo_vmware.api [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1195.686720] env[63538]: value = "task-5101878" [ 1195.686720] env[63538]: _type = "Task" [ 1195.686720] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.696250] env[63538]: DEBUG oslo_vmware.api [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101878, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.758387] env[63538]: DEBUG oslo_concurrency.lockutils [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "048573b4-26db-4a62-81e0-1bc1c3999d02" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.758740] env[63538]: DEBUG oslo_concurrency.lockutils [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.100805] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.139572] env[63538]: DEBUG nova.virt.hardware [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='fd9d23fc0debd5318943018bff3c7e8d',container_format='bare',created_at=2025-12-12T13:00:26Z,direct_url=,disk_format='vmdk',id=fc39aa85-5f54-44b2-83e7-fcf99170aec7,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-127327074-shelved',owner='1fe11c1386b14d139f4416cbf20fb201',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-12-12T13:00:41Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1196.139872] env[63538]: DEBUG nova.virt.hardware [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1196.140068] env[63538]: DEBUG nova.virt.hardware [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1196.140284] env[63538]: DEBUG nova.virt.hardware [None 
req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1196.140463] env[63538]: DEBUG nova.virt.hardware [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1196.140631] env[63538]: DEBUG nova.virt.hardware [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1196.140883] env[63538]: DEBUG nova.virt.hardware [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1196.141109] env[63538]: DEBUG nova.virt.hardware [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1196.141324] env[63538]: DEBUG nova.virt.hardware [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1196.141518] env[63538]: DEBUG nova.virt.hardware [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1196.141729] env[63538]: DEBUG nova.virt.hardware [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1196.142668] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28350f6e-2302-496d-9c80-ffb0de1f5ab4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.149065] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101875, 'name': ReconfigVM_Task, 'duration_secs': 0.518138} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.149790] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Reconfigured VM instance instance-00000075 to attach disk [datastore1] df85b1e1-0319-4619-8680-73bb5d413595/df85b1e1-0319-4619-8680-73bb5d413595.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1196.150456] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-389adf35-2c00-4e3b-b0a1-f3b98ec8c8b2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.156488] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039c9bd4-d875-470f-b534-417019c948c0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.162935] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1196.162935] env[63538]: value = "task-5101879" [ 1196.162935] env[63538]: _type = "Task" [ 1196.162935] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.174485] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:c8:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '319cec1c-49eb-43a4-a9ec-6b74a507b6d6', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1196.182143] env[63538]: DEBUG oslo.service.loopingcall [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1196.182969] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1196.183212] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d64bd037-8307-4018-904e-645930da31cf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.201408] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101879, 'name': Rename_Task} progress is 14%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.207951] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Acquiring lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.208174] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.208406] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Acquiring lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.209069] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.209069] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.214855] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1196.214855] env[63538]: value = "task-5101880" [ 1196.214855] env[63538]: _type = "Task" [ 1196.214855] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.215219] env[63538]: DEBUG oslo_vmware.api [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101878, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158374} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.215779] env[63538]: INFO nova.compute.manager [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Terminating instance [ 1196.217597] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1196.217845] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1196.218081] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1196.218269] env[63538]: INFO nova.compute.manager [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1196.218530] env[63538]: DEBUG oslo.service.loopingcall [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1196.219139] env[63538]: DEBUG nova.compute.manager [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1196.219347] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1196.222596] env[63538]: DEBUG nova.compute.manager [-] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1196.222711] env[63538]: DEBUG nova.network.neutron [-] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1196.224815] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8b54dd-380c-4ac5-94e7-e072b379e759 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.234347] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1196.237374] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20ab9710-5429-4d62-911b-4548cc25492c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.239117] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101880, 'name': CreateVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.248680] env[63538]: DEBUG oslo_vmware.api [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Waiting for the task: (returnval){ [ 1196.248680] env[63538]: value = "task-5101881" [ 1196.248680] env[63538]: _type = "Task" [ 1196.248680] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.252570] env[63538]: DEBUG nova.network.neutron [-] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1196.260796] env[63538]: DEBUG oslo_vmware.api [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101881, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.262687] env[63538]: INFO nova.compute.manager [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Detaching volume c24d91c9-a15c-486f-a3b8-f0c4b143cda1 [ 1196.313594] env[63538]: INFO nova.virt.block_device [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Attempting to driver detach volume c24d91c9-a15c-486f-a3b8-f0c4b143cda1 from mountpoint /dev/sdb [ 1196.313909] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Volume detach. Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1196.314466] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992526', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'name': 'volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '048573b4-26db-4a62-81e0-1bc1c3999d02', 'attached_at': '', 'detached_at': '', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'serial': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1196.315326] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ec965f-a248-4a2c-8a92-73600c7ba45c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.342406] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30708d7-9bfd-4dd2-b3ac-886d23b89246 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.351527] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4523f0f5-689a-47e2-8c06-23b602ce1fe2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.382735] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8d2d89-2777-4c61-b853-b2efdaa3db30 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.410624] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] The volume has not been displaced from its original location: [datastore1] volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1/volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1.vmdk. No consolidation needed. 
{{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1196.417074] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1196.417948] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbcb6530-dce5-4eff-8143-69cdabae2068 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.436529] env[63538]: DEBUG oslo_concurrency.lockutils [None req-159af363-24de-4bc7-a159-636d6667640b tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.444808] env[63538]: DEBUG oslo_vmware.api [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1196.444808] env[63538]: value = "task-5101882" [ 1196.444808] env[63538]: _type = "Task" [ 1196.444808] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.458381] env[63538]: DEBUG oslo_vmware.api [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101882, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.674392] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101879, 'name': Rename_Task, 'duration_secs': 0.164942} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.674707] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1196.675027] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e41b1210-4697-473e-9304-aaec53df26d1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.682141] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1196.682141] env[63538]: value = "task-5101883" [ 1196.682141] env[63538]: _type = "Task" [ 1196.682141] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.690935] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101883, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.728644] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101880, 'name': CreateVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.742128] env[63538]: DEBUG nova.compute.manager [req-66cc849d-8697-4024-b46b-695d20b37a8e req-7b27b11a-af5f-4be4-897b-d0c05619c55e service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Received event network-changed-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1196.742302] env[63538]: DEBUG nova.compute.manager [req-66cc849d-8697-4024-b46b-695d20b37a8e req-7b27b11a-af5f-4be4-897b-d0c05619c55e service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Refreshing instance network info cache due to event network-changed-319cec1c-49eb-43a4-a9ec-6b74a507b6d6. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1196.742555] env[63538]: DEBUG oslo_concurrency.lockutils [req-66cc849d-8697-4024-b46b-695d20b37a8e req-7b27b11a-af5f-4be4-897b-d0c05619c55e service nova] Acquiring lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.742755] env[63538]: DEBUG oslo_concurrency.lockutils [req-66cc849d-8697-4024-b46b-695d20b37a8e req-7b27b11a-af5f-4be4-897b-d0c05619c55e service nova] Acquired lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.742966] env[63538]: DEBUG nova.network.neutron [req-66cc849d-8697-4024-b46b-695d20b37a8e req-7b27b11a-af5f-4be4-897b-d0c05619c55e service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Refreshing network info cache for port 319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1196.755849] env[63538]: DEBUG nova.network.neutron [-] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.762478] env[63538]: DEBUG oslo_vmware.api [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101881, 'name': PowerOffVM_Task, 'duration_secs': 0.234183} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.762478] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1196.762478] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1196.762718] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9185b49b-827b-4ea4-8a70-5c2209011eb2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.845277] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1196.845538] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1196.845731] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Deleting the datastore file [datastore1] 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1196.846229] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-971492fe-e502-4da5-a2c5-fe826aaabcb4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.854911] env[63538]: DEBUG oslo_vmware.api [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Waiting for the task: (returnval){ [ 1196.854911] env[63538]: value = "task-5101885" [ 1196.854911] env[63538]: _type = "Task" [ 1196.854911] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.864314] env[63538]: DEBUG oslo_vmware.api [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101885, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.891694] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.892038] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.892310] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.892520] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.892741] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.895208] env[63538]: INFO nova.compute.manager [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Terminating instance [ 1196.899895] env[63538]: DEBUG nova.compute.manager [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1196.900159] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1196.901025] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0501a122-4569-407a-84f4-6452e26c74bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.908746] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1196.909058] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4216fc1-22d2-4efe-bfc9-2b36c49d9b02 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.917355] env[63538]: DEBUG oslo_vmware.api [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1196.917355] env[63538]: value = "task-5101886" [ 1196.917355] env[63538]: _type = "Task" [ 1196.917355] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.926557] env[63538]: DEBUG oslo_vmware.api [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101886, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.956255] env[63538]: DEBUG oslo_vmware.api [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101882, 'name': ReconfigVM_Task, 'duration_secs': 0.242688} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.956661] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1196.962847] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a83bbd8-0533-406a-b6c0-c81adfbcf2c6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.981041] env[63538]: DEBUG oslo_vmware.api [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1196.981041] env[63538]: value = "task-5101887" [ 1196.981041] env[63538]: _type = "Task" [ 1196.981041] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.990951] env[63538]: DEBUG oslo_vmware.api [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101887, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.057816] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.058287] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1197.193202] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101883, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.232027] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101880, 'name': CreateVM_Task, 'duration_secs': 0.522201} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.232027] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1197.232027] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fc39aa85-5f54-44b2-83e7-fcf99170aec7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.232027] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fc39aa85-5f54-44b2-83e7-fcf99170aec7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.232027] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/fc39aa85-5f54-44b2-83e7-fcf99170aec7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1197.232027] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72bc6791-6430-4cd0-9e71-7568074e99d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.237452] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1197.237452] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f24e65-dd9e-5377-cb6e-8ead5077c9af" [ 1197.237452] env[63538]: _type = "Task" [ 1197.237452] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.248830] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52f24e65-dd9e-5377-cb6e-8ead5077c9af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.258139] env[63538]: INFO nova.compute.manager [-] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Took 1.04 seconds to deallocate network for instance. [ 1197.365079] env[63538]: DEBUG oslo_vmware.api [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Task: {'id': task-5101885, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190026} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.365365] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1197.365556] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1197.365794] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1197.365983] env[63538]: INFO nova.compute.manager [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1197.366246] env[63538]: DEBUG oslo.service.loopingcall [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1197.366447] env[63538]: DEBUG nova.compute.manager [-] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1197.366541] env[63538]: DEBUG nova.network.neutron [-] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1197.428926] env[63538]: DEBUG oslo_vmware.api [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101886, 'name': PowerOffVM_Task, 'duration_secs': 0.216295} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.429243] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1197.429425] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1197.429693] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27a0b975-ff9a-470a-9d72-a0268ab1d19c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.494038] env[63538]: DEBUG oslo_vmware.api [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101887, 'name': ReconfigVM_Task, 'duration_secs': 0.166656} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.495820] env[63538]: DEBUG nova.network.neutron [req-66cc849d-8697-4024-b46b-695d20b37a8e req-7b27b11a-af5f-4be4-897b-d0c05619c55e service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Updated VIF entry in instance network info cache for port 319cec1c-49eb-43a4-a9ec-6b74a507b6d6. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1197.496218] env[63538]: DEBUG nova.network.neutron [req-66cc849d-8697-4024-b46b-695d20b37a8e req-7b27b11a-af5f-4be4-897b-d0c05619c55e service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Updating instance_info_cache with network_info: [{"id": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "address": "fa:16:3e:ce:c8:c1", "network": {"id": "8fd1ecf4-3813-4ca4-82b4-8ba3f04e3c41", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1232144260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1fe11c1386b14d139f4416cbf20fb201", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap319cec1c-49", "ovs_interfaceid": "319cec1c-49eb-43a4-a9ec-6b74a507b6d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.497556] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992526', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'name': 'volume-c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '048573b4-26db-4a62-81e0-1bc1c3999d02', 'attached_at': '', 'detached_at': '', 'volume_id': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1', 'serial': 'c24d91c9-a15c-486f-a3b8-f0c4b143cda1'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1197.500087] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1197.500333] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1197.500483] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleting the datastore file [datastore2] e0d5a3b2-21e1-4de0-ac10-1a5687a60c10 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1197.501833] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95192d29-12b4-4168-9ac7-1424ab554e46 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.510041] env[63538]: DEBUG oslo_vmware.api [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1197.510041] env[63538]: value = "task-5101889" [ 1197.510041] env[63538]: _type = "Task" [ 1197.510041] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.521345] env[63538]: DEBUG oslo_vmware.api [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101889, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.561769] env[63538]: DEBUG nova.compute.utils [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1197.694090] env[63538]: DEBUG oslo_vmware.api [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101883, 'name': PowerOnVM_Task, 'duration_secs': 0.561522} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.694397] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1197.694607] env[63538]: INFO nova.compute.manager [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Took 5.91 seconds to spawn the instance on the hypervisor. [ 1197.694791] env[63538]: DEBUG nova.compute.manager [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1197.695598] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a60d4e-de89-41a7-ac69-0f99e8f448f7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.706900] env[63538]: DEBUG nova.compute.manager [req-c587a5f7-d2a0-4ede-bd43-c9014198a78e req-dfd8e4da-1521-4e87-a9e9-027f2ccb73a3 service nova] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Received event network-vif-deleted-8e0bc120-3db7-47f2-924e-a7cb1aad608b {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1197.707089] env[63538]: INFO nova.compute.manager [req-c587a5f7-d2a0-4ede-bd43-c9014198a78e req-dfd8e4da-1521-4e87-a9e9-027f2ccb73a3 service nova] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Neutron deleted interface 8e0bc120-3db7-47f2-924e-a7cb1aad608b; detaching it from the instance and deleting it from the info cache [ 1197.707287] env[63538]: DEBUG nova.network.neutron [req-c587a5f7-d2a0-4ede-bd43-c9014198a78e req-dfd8e4da-1521-4e87-a9e9-027f2ccb73a3 service nova] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.750552] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fc39aa85-5f54-44b2-83e7-fcf99170aec7" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1197.750841] env[63538]: DEBUG 
nova.virt.vmwareapi.vmops [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Processing image fc39aa85-5f54-44b2-83e7-fcf99170aec7 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1197.751089] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fc39aa85-5f54-44b2-83e7-fcf99170aec7/fc39aa85-5f54-44b2-83e7-fcf99170aec7.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.751258] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fc39aa85-5f54-44b2-83e7-fcf99170aec7/fc39aa85-5f54-44b2-83e7-fcf99170aec7.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.751445] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1197.751981] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-255d4578-9895-43c1-9208-1d3fa1225fcb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.761686] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1197.761909] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1197.762610] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c24a94c6-916a-4c29-8050-0929e62f49d5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.765628] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.765872] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1197.766100] env[63538]: DEBUG nova.objects.instance [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lazy-loading 'resources' on Instance uuid d91a140b-6ca9-4c0e-b433-795d2014975c {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.770356] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1197.770356] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e124ea-a1a5-d873-f1e5-4e4adf3c7b98" [ 1197.770356] env[63538]: _type = "Task" [ 1197.770356] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.779532] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e124ea-a1a5-d873-f1e5-4e4adf3c7b98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.001808] env[63538]: DEBUG oslo_concurrency.lockutils [req-66cc849d-8697-4024-b46b-695d20b37a8e req-7b27b11a-af5f-4be4-897b-d0c05619c55e service nova] Releasing lock "refresh_cache-d00151c1-ca34-4c57-9ed2-74d506a0cffb" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.020281] env[63538]: DEBUG oslo_vmware.api [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101889, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164253} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.020552] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1198.020743] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1198.020928] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1198.021139] env[63538]: INFO nova.compute.manager [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1198.021412] env[63538]: DEBUG oslo.service.loopingcall [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1198.021621] env[63538]: DEBUG nova.compute.manager [-] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1198.021716] env[63538]: DEBUG nova.network.neutron [-] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1198.051955] env[63538]: DEBUG nova.objects.instance [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lazy-loading 'flavor' on Instance uuid 048573b4-26db-4a62-81e0-1bc1c3999d02 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1198.065640] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.117287] env[63538]: DEBUG nova.network.neutron [-] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.213516] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9a65ed13-2eec-4444-af11-c728a8f047bf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.219338] env[63538]: INFO nova.compute.manager [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Took 12.07 seconds to build instance. [ 1198.226660] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd795ce0-8816-4b5c-a39f-5439f00550fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.262467] env[63538]: DEBUG nova.compute.manager [req-c587a5f7-d2a0-4ede-bd43-c9014198a78e req-dfd8e4da-1521-4e87-a9e9-027f2ccb73a3 service nova] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Detach interface failed, port_id=8e0bc120-3db7-47f2-924e-a7cb1aad608b, reason: Instance 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1198.283049] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Preparing fetch location {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1198.283334] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Fetch image to [datastore1] OSTACK_IMG_a07a6cf1-848d-4c75-ac78-e0b152ede9d8/OSTACK_IMG_a07a6cf1-848d-4c75-ac78-e0b152ede9d8.vmdk {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1198.283918] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Downloading stream optimized image fc39aa85-5f54-44b2-83e7-fcf99170aec7 to [datastore1] OSTACK_IMG_a07a6cf1-848d-4c75-ac78-e0b152ede9d8/OSTACK_IMG_a07a6cf1-848d-4c75-ac78-e0b152ede9d8.vmdk on the data store datastore1 as vApp {{(pid=63538) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1198.283918] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Downloading image file data fc39aa85-5f54-44b2-83e7-fcf99170aec7 to the ESX as VM named 'OSTACK_IMG_a07a6cf1-848d-4c75-ac78-e0b152ede9d8' {{(pid=63538) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1198.441094] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f65befa6-5cf8-4ed9-bcc5-eb3a4f5d2d21 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.449779] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9b61bf-3209-4989-aec8-c547de581201 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.481179] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae72055-6c30-4d10-86b3-0a311f7917b3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.491377] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2d4cd1-6a77-4efc-a80e-d4f569bbc028 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.507916] env[63538]: DEBUG nova.compute.provider_tree [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1198.509968] env[63538]: DEBUG oslo_vmware.rw_handles 
[None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1198.509968] env[63538]: value = "resgroup-9" [ 1198.509968] env[63538]: _type = "ResourcePool" [ 1198.509968] env[63538]: }. {{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1198.511806] env[63538]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-407bb511-c722-4650-a8b7-9643e49b9f8f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.528681] env[63538]: DEBUG nova.scheduler.client.report [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1198.539366] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lease: (returnval){ [ 1198.539366] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b8d2f6-bdbc-9183-bd6b-3368d5f6b9ee" [ 1198.539366] env[63538]: _type = "HttpNfcLease" [ 1198.539366] env[63538]: } obtained for vApp import into resource pool (val){ [ 1198.539366] env[63538]: value = "resgroup-9" [ 1198.539366] env[63538]: _type = "ResourcePool" [ 1198.539366] env[63538]: }. {{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1198.539871] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the lease: (returnval){ [ 1198.539871] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b8d2f6-bdbc-9183-bd6b-3368d5f6b9ee" [ 1198.539871] env[63538]: _type = "HttpNfcLease" [ 1198.539871] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1198.549757] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1198.549757] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b8d2f6-bdbc-9183-bd6b-3368d5f6b9ee" [ 1198.549757] env[63538]: _type = "HttpNfcLease" [ 1198.549757] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1198.622935] env[63538]: INFO nova.compute.manager [-] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Took 1.26 seconds to deallocate network for instance. 
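Editor's note: the entries above trace the stream-optimized image import. ResourcePool.ImportVApp is invoked to obtain an HttpNfcLease, the lease is polled until it leaves the "initializing" state, and the VMDK device URL is read from the lease info before the image bytes are written over HTTPS. The following is a minimal, hedged sketch of that flow using oslo.vmware; it is an illustration only, not Nova's actual code in nova/virt/vmwareapi/images.py or oslo_vmware/rw_handles.py, and the session, rp_ref, folder_ref and import_spec objects (hypothetical names) are assumed to have been built elsewhere.

# Hedged sketch: driving an ImportVApp HttpNfcLease with oslo.vmware,
# mirroring the "Creating HttpNfcLease lease for vApp import",
# "Lease ... is initializing / is ready" and "Found VMDK URL ... from
# lease info" entries in this log.
from oslo_vmware import vim_util


def import_vapp_vmdk_url(session, rp_ref, folder_ref, import_spec):
    # ResourcePool.ImportVApp returns an HttpNfcLease managed object ref.
    lease = session.invoke_api(session.vim, 'ImportVApp', rp_ref,
                               spec=import_spec, folder=folder_ref)
    # Poll the lease until it reports "ready" (wait_for_lease_ready in
    # oslo_vmware/api.py, the frame referenced in the log lines above).
    session.wait_for_lease_ready(lease)
    # Read HttpNfcLeaseInfo and pick the disk device URL that the VMDK
    # bytes will be written to.
    info = session.invoke_api(vim_util, 'get_object_property',
                              session.vim, lease, 'info')
    for device_url in info.deviceUrl:
        if device_url.disk:
            return lease, device_url.url
    raise RuntimeError('lease info contains no disk device URL')

Once all bytes have been written to that URL, the lease is released with HttpNfcLease.HttpNfcLeaseComplete, which is what the "Releasing lease for ... disk-0.vmdk" entries later in this section correspond to.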
[ 1198.721468] env[63538]: DEBUG oslo_concurrency.lockutils [None req-34b082c3-efed-4752-9ea0-8cd371c718e2 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Lock "df85b1e1-0319-4619-8680-73bb5d413595" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.580s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.027879] env[63538]: DEBUG nova.network.neutron [-] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.033793] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.268s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.050144] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1199.050144] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b8d2f6-bdbc-9183-bd6b-3368d5f6b9ee" [ 1199.050144] env[63538]: _type = "HttpNfcLease" [ 1199.050144] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1199.060613] env[63538]: INFO nova.scheduler.client.report [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Deleted allocations for instance d91a140b-6ca9-4c0e-b433-795d2014975c [ 1199.068043] env[63538]: DEBUG oslo_concurrency.lockutils [None req-476b17ad-bf1a-4e30-a2d2-1e9c035e4cfb tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.308s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.131580] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.131967] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.132299] env[63538]: DEBUG nova.objects.instance [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Lazy-loading 'resources' on Instance uuid 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.154945] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 
tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.155619] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.155619] env[63538]: INFO nova.compute.manager [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Attaching volume f5c40b11-75f1-482f-aa80-8e412c228ea5 to /dev/sdb [ 1199.196067] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03464c1-5464-42e1-96fa-ddfe2f06e0d3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.204554] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc5d506-1df3-4c2e-ad79-3e6fc3ab8bf5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.219127] env[63538]: DEBUG nova.virt.block_device [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Updating existing volume attachment record: 97b5c017-a726-4bb4-9e31-12c4dfdde82e {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1199.323837] env[63538]: INFO nova.compute.manager [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Rebuilding instance [ 1199.376536] env[63538]: DEBUG nova.compute.manager [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1199.377635] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cca7b12-2a8a-41c6-a4b2-cf543f02d729 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.531426] env[63538]: INFO nova.compute.manager [-] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Took 1.51 seconds to deallocate network for instance. [ 1199.550581] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1199.550581] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b8d2f6-bdbc-9183-bd6b-3368d5f6b9ee" [ 1199.550581] env[63538]: _type = "HttpNfcLease" [ 1199.550581] env[63538]: } is ready. 
{{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1199.551014] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1199.551014] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b8d2f6-bdbc-9183-bd6b-3368d5f6b9ee" [ 1199.551014] env[63538]: _type = "HttpNfcLease" [ 1199.551014] env[63538]: }. {{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1199.551908] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ec47f1-1388-46b1-8f77-23f90b1fd8b8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.566421] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52eaedb3-2fca-c87a-c999-cac0217bf4df/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1199.566665] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52eaedb3-2fca-c87a-c999-cac0217bf4df/disk-0.vmdk. 
{{(pid=63538) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1199.624308] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ce382ba-50d9-4d0b-bbd3-536e551bc9f9 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "d91a140b-6ca9-4c0e-b433-795d2014975c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.641s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.633237] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8a128b4c-f9c9-4480-9c3f-b313e0a172ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.737467] env[63538]: DEBUG nova.compute.manager [req-54d97135-8768-4c34-9034-97a4602560e1 req-b75fe5d9-cda4-4be1-9671-0e023ed9ad1a service nova] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Received event network-vif-deleted-c8b86754-970c-4f8a-b3fb-ec8fb42d3863 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1199.782016] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad5d33d-53af-4b92-a339-018d0de2b777 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.790288] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21fa349-28cf-4857-ac07-355454255fa4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.828245] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991e06ab-b668-45ac-a72c-bf58ca532642 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.837213] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f49eec20-e522-42d8-b05a-03742243ae74 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.851807] env[63538]: DEBUG nova.compute.provider_tree [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.889774] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1199.890151] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c46c7ff3-98ca-47b0-b556-160ae8426681 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.900018] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1199.900018] env[63538]: value = 
"task-5101894" [ 1199.900018] env[63538]: _type = "Task" [ 1199.900018] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.911270] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101894, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.039028] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.070076] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "048573b4-26db-4a62-81e0-1bc1c3999d02" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.070294] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.070389] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "048573b4-26db-4a62-81e0-1bc1c3999d02-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.070581] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.070762] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.073251] env[63538]: INFO nova.compute.manager [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Terminating 
instance [ 1200.076431] env[63538]: DEBUG nova.compute.manager [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1200.076648] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1200.077610] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea52480b-c5eb-4355-8f1d-90dde1df9259 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.086854] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1200.087180] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b163c3f8-17cc-4ce5-b51c-c8d21ba33fd7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.094249] env[63538]: DEBUG oslo_vmware.api [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1200.094249] env[63538]: value = "task-5101895" [ 1200.094249] env[63538]: _type = "Task" [ 1200.094249] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.105111] env[63538]: DEBUG oslo_vmware.api [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101895, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.356988] env[63538]: DEBUG nova.scheduler.client.report [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1200.412364] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101894, 'name': PowerOffVM_Task, 'duration_secs': 0.300156} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.412654] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1200.412872] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1200.413662] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d7fe1d-a0b5-44d4-86b4-6ce065e9eb0c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.422792] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1200.423059] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7c0bbda-828d-4ba3-bb25-a6dbb72b3506 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.452999] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1200.453329] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1200.453576] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Deleting the datastore file [datastore1] df85b1e1-0319-4619-8680-73bb5d413595 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1200.453921] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8de09b49-1695-45b2-b01e-a2d7e673f775 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.461324] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1200.461324] env[63538]: value = "task-5101897" [ 1200.461324] env[63538]: _type = "Task" [ 1200.461324] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.471421] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101897, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.518603] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "6a8de8d2-aa15-4057-a936-57cad9c8b1d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.519124] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "6a8de8d2-aa15-4057-a936-57cad9c8b1d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.519252] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "6a8de8d2-aa15-4057-a936-57cad9c8b1d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.519393] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "6a8de8d2-aa15-4057-a936-57cad9c8b1d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.519616] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "6a8de8d2-aa15-4057-a936-57cad9c8b1d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.523060] env[63538]: INFO nova.compute.manager [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Terminating instance [ 1200.525060] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "refresh_cache-6a8de8d2-aa15-4057-a936-57cad9c8b1d0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.525228] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 
tempest-ServerShowV247Test-1654296754-project-member] Acquired lock "refresh_cache-6a8de8d2-aa15-4057-a936-57cad9c8b1d0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.525407] env[63538]: DEBUG nova.network.neutron [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1200.606858] env[63538]: DEBUG oslo_vmware.api [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101895, 'name': PowerOffVM_Task, 'duration_secs': 0.175814} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.607251] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1200.607478] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1200.607750] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c8de4b5-778d-489e-ae36-1458f11f2c1c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.678906] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1200.679245] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1200.679344] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleting the datastore file [datastore2] 048573b4-26db-4a62-81e0-1bc1c3999d02 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1200.679617] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1773296-a4c4-4037-9c16-de2741f2c15b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.688724] env[63538]: DEBUG oslo_vmware.api [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 
tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1200.688724] env[63538]: value = "task-5101899" [ 1200.688724] env[63538]: _type = "Task" [ 1200.688724] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.699293] env[63538]: DEBUG oslo_vmware.api [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101899, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.864722] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.733s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.867236] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.828s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.867405] env[63538]: DEBUG nova.objects.instance [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lazy-loading 'resources' on Instance uuid e0d5a3b2-21e1-4de0-ac10-1a5687a60c10 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1200.890695] env[63538]: INFO nova.scheduler.client.report [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Deleted allocations for instance 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a [ 1200.923805] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Completed reading data from the image iterator. {{(pid=63538) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1200.923994] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52eaedb3-2fca-c87a-c999-cac0217bf4df/disk-0.vmdk. 
{{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1200.925150] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fa6eae-2ea7-4034-8f1e-9a253d890744 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.932634] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52eaedb3-2fca-c87a-c999-cac0217bf4df/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1200.932872] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52eaedb3-2fca-c87a-c999-cac0217bf4df/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1200.933152] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-4b943e00-1544-4d56-b7de-8eb7958a97b4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.972061] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101897, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110607} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.972380] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1200.972597] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1200.972849] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1201.045715] env[63538]: DEBUG nova.network.neutron [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1201.092300] env[63538]: DEBUG nova.network.neutron [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.198683] env[63538]: DEBUG oslo_vmware.api [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101899, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158492} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.198951] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1201.199172] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1201.199364] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1201.199550] env[63538]: INFO nova.compute.manager [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1201.199815] env[63538]: DEBUG oslo.service.loopingcall [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1201.200032] env[63538]: DEBUG nova.compute.manager [-] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1201.200134] env[63538]: DEBUG nova.network.neutron [-] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1201.205364] env[63538]: DEBUG oslo_vmware.rw_handles [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52eaedb3-2fca-c87a-c999-cac0217bf4df/disk-0.vmdk. 
{{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1201.205577] env[63538]: INFO nova.virt.vmwareapi.images [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Downloaded image file data fc39aa85-5f54-44b2-83e7-fcf99170aec7 [ 1201.206406] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35d0b37-7432-442f-963e-6fa194abedae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.222502] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b85f909-b23e-417f-a151-11a1f537dcb0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.274198] env[63538]: INFO nova.virt.vmwareapi.images [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] The imported VM was unregistered [ 1201.276634] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Caching image {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1201.276950] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Creating directory with path [datastore1] devstack-image-cache_base/fc39aa85-5f54-44b2-83e7-fcf99170aec7 {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1201.277351] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13883966-0563-4973-a955-00e302c426d1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.289881] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Created directory with path [datastore1] devstack-image-cache_base/fc39aa85-5f54-44b2-83e7-fcf99170aec7 {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1201.290122] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_a07a6cf1-848d-4c75-ac78-e0b152ede9d8/OSTACK_IMG_a07a6cf1-848d-4c75-ac78-e0b152ede9d8.vmdk to [datastore1] devstack-image-cache_base/fc39aa85-5f54-44b2-83e7-fcf99170aec7/fc39aa85-5f54-44b2-83e7-fcf99170aec7.vmdk. 
{{(pid=63538) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1201.290408] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-245078f6-cdf8-4b47-aa4f-180f7e920432 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.298230] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1201.298230] env[63538]: value = "task-5101901" [ 1201.298230] env[63538]: _type = "Task" [ 1201.298230] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.308634] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101901, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.399625] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8ba4478f-7584-4293-b285-23d5e546640e tempest-ServerTagsTestJSON-2008059827 tempest-ServerTagsTestJSON-2008059827-project-member] Lock "58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.191s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.498398] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd1a30f-4db5-4f4a-93a8-a65924d4932b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.508122] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c94d7d2-55db-441c-9ea5-9bf647bef539 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.542013] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f2a67e-b7d9-44bf-b052-5490d35ab43c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.552043] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006e8ea7-ddfd-4921-863c-2c03364f2b2c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.573549] env[63538]: DEBUG nova.compute.provider_tree [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1201.595117] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Releasing lock "refresh_cache-6a8de8d2-aa15-4057-a936-57cad9c8b1d0" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.595587] env[63538]: DEBUG 
nova.compute.manager [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1201.595790] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1201.597089] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e30371f-e308-4679-bc9c-27e653d34ce1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.605671] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1201.605946] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed2be268-9805-47f9-bc3a-d6f518e27515 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.614372] env[63538]: DEBUG oslo_vmware.api [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1201.614372] env[63538]: value = "task-5101903" [ 1201.614372] env[63538]: _type = "Task" [ 1201.614372] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.624689] env[63538]: DEBUG oslo_vmware.api [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101903, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.780191] env[63538]: DEBUG nova.compute.manager [req-b2dad8d7-b03b-440a-b5cd-4006a9619673 req-b168f696-2531-4bc2-8647-9b1443f2af6b service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Received event network-vif-deleted-3de39e87-f579-458e-a713-326821c5daa5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1201.780515] env[63538]: INFO nova.compute.manager [req-b2dad8d7-b03b-440a-b5cd-4006a9619673 req-b168f696-2531-4bc2-8647-9b1443f2af6b service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Neutron deleted interface 3de39e87-f579-458e-a713-326821c5daa5; detaching it from the instance and deleting it from the info cache [ 1201.780614] env[63538]: DEBUG nova.network.neutron [req-b2dad8d7-b03b-440a-b5cd-4006a9619673 req-b168f696-2531-4bc2-8647-9b1443f2af6b service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.811677] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101901, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.014056] env[63538]: DEBUG nova.virt.hardware [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1202.014450] env[63538]: DEBUG nova.virt.hardware [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1202.014533] env[63538]: DEBUG nova.virt.hardware [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1202.014741] env[63538]: DEBUG nova.virt.hardware [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1202.015083] env[63538]: DEBUG nova.virt.hardware [None 
req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1202.019019] env[63538]: DEBUG nova.virt.hardware [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1202.019019] env[63538]: DEBUG nova.virt.hardware [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1202.019019] env[63538]: DEBUG nova.virt.hardware [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1202.019019] env[63538]: DEBUG nova.virt.hardware [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1202.019019] env[63538]: DEBUG nova.virt.hardware [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1202.019019] env[63538]: DEBUG nova.virt.hardware [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1202.019019] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6b53da-c7c6-4e01-a9db-9853722bd2b6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.026136] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4993202f-2e7c-4854-b729-f230c4b4c525 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.042260] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Instance VIF info [] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1202.049516] env[63538]: DEBUG oslo.service.loopingcall [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1202.050787] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1202.051019] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4b7be80-b5c7-416d-a2fd-cf63ee0d91d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.070567] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1202.070567] env[63538]: value = "task-5101904" [ 1202.070567] env[63538]: _type = "Task" [ 1202.070567] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.077170] env[63538]: DEBUG nova.scheduler.client.report [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1202.084509] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101904, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.125969] env[63538]: DEBUG oslo_vmware.api [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101903, 'name': PowerOffVM_Task, 'duration_secs': 0.150922} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.126331] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1202.126536] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1202.126846] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dead9b74-9c11-4107-8b69-655061d4d55f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.169922] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1202.170415] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1202.170415] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Deleting the datastore file [datastore1] 6a8de8d2-aa15-4057-a936-57cad9c8b1d0 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1202.170722] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7823db4d-95f5-459f-8717-1aa33cb15ab6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.180781] env[63538]: DEBUG oslo_vmware.api [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for the task: (returnval){ [ 1202.180781] env[63538]: value = "task-5101906" [ 1202.180781] env[63538]: _type = "Task" [ 1202.180781] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.191406] env[63538]: DEBUG oslo_vmware.api [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101906, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.195943] env[63538]: DEBUG nova.network.neutron [-] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.291511] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d0092ca5-9ffc-4f8f-9610-297d1c1be3ae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.312088] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175f5930-7677-4c6e-991d-9695563984d5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.329130] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101901, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.360224] env[63538]: DEBUG nova.compute.manager [req-b2dad8d7-b03b-440a-b5cd-4006a9619673 req-b168f696-2531-4bc2-8647-9b1443f2af6b service nova] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Detach interface failed, port_id=3de39e87-f579-458e-a713-326821c5daa5, reason: Instance 048573b4-26db-4a62-81e0-1bc1c3999d02 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1202.582591] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.715s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.585049] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101904, 'name': CreateVM_Task, 'duration_secs': 0.353105} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.585676] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1202.586233] env[63538]: DEBUG oslo_concurrency.lockutils [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1202.586433] env[63538]: DEBUG oslo_concurrency.lockutils [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.586808] env[63538]: DEBUG oslo_concurrency.lockutils [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1202.588036] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8412ad8b-b625-43cd-9392-18b99d310b77 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.596616] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1202.596616] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5274b2be-07cf-0b5c-0d15-83bfebc67df1" [ 1202.596616] env[63538]: _type = "Task" [ 1202.596616] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.606642] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5274b2be-07cf-0b5c-0d15-83bfebc67df1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.615567] env[63538]: INFO nova.scheduler.client.report [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleted allocations for instance e0d5a3b2-21e1-4de0-ac10-1a5687a60c10 [ 1202.692543] env[63538]: DEBUG oslo_vmware.api [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.698626] env[63538]: INFO nova.compute.manager [-] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Took 1.50 seconds to deallocate network for instance. 
[ 1202.818276] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101901, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.109251] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5274b2be-07cf-0b5c-0d15-83bfebc67df1, 'name': SearchDatastore_Task, 'duration_secs': 0.068586} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.109740] env[63538]: DEBUG oslo_concurrency.lockutils [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1203.109980] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1203.110401] env[63538]: DEBUG oslo_concurrency.lockutils [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.110605] env[63538]: DEBUG oslo_concurrency.lockutils [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.110837] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1203.111248] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0539918c-a117-424b-aa2d-d869f879bc6e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.125725] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3edd421f-b795-446f-b41c-9f6a4669da5f tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "e0d5a3b2-21e1-4de0-ac10-1a5687a60c10" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.234s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.126866] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1203.127209] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1203.128636] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc2d4b9d-bdd8-4d71-a8be-9efa7294ca58 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.136813] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1203.136813] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52403eea-f870-783b-0891-27dc7fd1bd8f" [ 1203.136813] env[63538]: _type = "Task" [ 1203.136813] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.152272] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52403eea-f870-783b-0891-27dc7fd1bd8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.193814] env[63538]: DEBUG oslo_vmware.api [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101906, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.205667] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.206019] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.206215] env[63538]: DEBUG nova.objects.instance [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lazy-loading 'resources' on Instance uuid 048573b4-26db-4a62-81e0-1bc1c3999d02 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1203.313147] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101901, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.649922] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52403eea-f870-783b-0891-27dc7fd1bd8f, 'name': SearchDatastore_Task, 'duration_secs': 0.044555} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.650877] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0573de59-c063-4363-b437-b7ca67650ff7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.657833] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1203.657833] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5285b365-8543-37ff-70cf-8bdc705f1d94" [ 1203.657833] env[63538]: _type = "Task" [ 1203.657833] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.666839] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5285b365-8543-37ff-70cf-8bdc705f1d94, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.694160] env[63538]: DEBUG oslo_vmware.api [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Task: {'id': task-5101906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.27933} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.694160] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1203.694343] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1203.694538] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1203.694719] env[63538]: INFO nova.compute.manager [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Took 2.10 seconds to destroy the instance on the hypervisor. [ 1203.695010] env[63538]: DEBUG oslo.service.loopingcall [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1203.695254] env[63538]: DEBUG nova.compute.manager [-] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1203.695354] env[63538]: DEBUG nova.network.neutron [-] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1203.712271] env[63538]: DEBUG nova.network.neutron [-] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1203.766561] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Volume attach. 
Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1203.766874] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992551', 'volume_id': 'f5c40b11-75f1-482f-aa80-8e412c228ea5', 'name': 'volume-f5c40b11-75f1-482f-aa80-8e412c228ea5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e3feec17-ca1b-4873-bb0a-370c3868aabf', 'attached_at': '', 'detached_at': '', 'volume_id': 'f5c40b11-75f1-482f-aa80-8e412c228ea5', 'serial': 'f5c40b11-75f1-482f-aa80-8e412c228ea5'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1203.767827] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b842d26-ed7a-4e61-b7b4-21ad659d5214 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.793304] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba5c647-f999-40d9-ad2f-4ea77e01506d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.819608] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] volume-f5c40b11-75f1-482f-aa80-8e412c228ea5/volume-f5c40b11-75f1-482f-aa80-8e412c228ea5.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1203.824984] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-788755a2-19e0-46a7-87f1-cfa7ea3eceed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.845271] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101901, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.847122] env[63538]: DEBUG oslo_vmware.api [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1203.847122] env[63538]: value = "task-5101907" [ 1203.847122] env[63538]: _type = "Task" [ 1203.847122] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.861655] env[63538]: DEBUG oslo_vmware.api [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101907, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.863443] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57966425-7b51-49d1-b738-97ce860dbf97 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.871232] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfbd889-77af-4f46-a1cc-9629823ad08e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.901179] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-effbdf58-2ea7-42c2-937f-8f7358d54348 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.909185] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3734be1-6a8d-4e0b-9906-d814e584df82 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.922617] env[63538]: DEBUG nova.compute.provider_tree [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1204.170337] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5285b365-8543-37ff-70cf-8bdc705f1d94, 'name': SearchDatastore_Task, 'duration_secs': 0.04667} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.170616] env[63538]: DEBUG oslo_concurrency.lockutils [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.170873] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] df85b1e1-0319-4619-8680-73bb5d413595/df85b1e1-0319-4619-8680-73bb5d413595.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1204.171316] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dedf7c68-8c5d-4760-a30e-c53e60a436f8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.179420] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1204.179420] env[63538]: value = "task-5101908" [ 1204.179420] env[63538]: _type = "Task" [ 1204.179420] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.188352] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101908, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.214340] env[63538]: DEBUG nova.network.neutron [-] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.284797] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "057f192d-b470-4683-b197-913457d10717" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.285017] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "057f192d-b470-4683-b197-913457d10717" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.323100] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101901, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.559681} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.323402] env[63538]: INFO nova.virt.vmwareapi.ds_util [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_a07a6cf1-848d-4c75-ac78-e0b152ede9d8/OSTACK_IMG_a07a6cf1-848d-4c75-ac78-e0b152ede9d8.vmdk to [datastore1] devstack-image-cache_base/fc39aa85-5f54-44b2-83e7-fcf99170aec7/fc39aa85-5f54-44b2-83e7-fcf99170aec7.vmdk. 
[ 1204.323601] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Cleaning up location [datastore1] OSTACK_IMG_a07a6cf1-848d-4c75-ac78-e0b152ede9d8 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1204.323788] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_a07a6cf1-848d-4c75-ac78-e0b152ede9d8 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1204.324065] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6bc20c03-9971-4a7b-b009-5bc75368789f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.330658] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1204.330658] env[63538]: value = "task-5101909" [ 1204.330658] env[63538]: _type = "Task" [ 1204.330658] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.338516] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101909, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.359828] env[63538]: DEBUG oslo_vmware.api [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101907, 'name': ReconfigVM_Task, 'duration_secs': 0.407575} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.360135] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Reconfigured VM instance instance-00000070 to attach disk [datastore1] volume-f5c40b11-75f1-482f-aa80-8e412c228ea5/volume-f5c40b11-75f1-482f-aa80-8e412c228ea5.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1204.365232] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5dace482-12ba-424f-b75b-157b264c7a5d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.380325] env[63538]: DEBUG oslo_vmware.api [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1204.380325] env[63538]: value = "task-5101910" [ 1204.380325] env[63538]: _type = "Task" [ 1204.380325] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.388780] env[63538]: DEBUG oslo_vmware.api [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101910, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.425934] env[63538]: DEBUG nova.scheduler.client.report [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1204.691502] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101908, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.717919] env[63538]: INFO nova.compute.manager [-] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Took 1.02 seconds to deallocate network for instance. [ 1204.787282] env[63538]: DEBUG nova.compute.manager [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1204.840567] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101909, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.048499} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.840810] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1204.840992] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fc39aa85-5f54-44b2-83e7-fcf99170aec7/fc39aa85-5f54-44b2-83e7-fcf99170aec7.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.841266] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/fc39aa85-5f54-44b2-83e7-fcf99170aec7/fc39aa85-5f54-44b2-83e7-fcf99170aec7.vmdk to [datastore1] d00151c1-ca34-4c57-9ed2-74d506a0cffb/d00151c1-ca34-4c57-9ed2-74d506a0cffb.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1204.841579] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b52790bd-dec5-40f7-a2c1-12cdba2c36e5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.850014] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1204.850014] env[63538]: value = "task-5101911" [ 1204.850014] env[63538]: _type = "Task" [ 1204.850014] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.858823] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101911, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.892630] env[63538]: DEBUG oslo_vmware.api [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101910, 'name': ReconfigVM_Task, 'duration_secs': 0.168793} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.893054] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992551', 'volume_id': 'f5c40b11-75f1-482f-aa80-8e412c228ea5', 'name': 'volume-f5c40b11-75f1-482f-aa80-8e412c228ea5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e3feec17-ca1b-4873-bb0a-370c3868aabf', 'attached_at': '', 'detached_at': '', 'volume_id': 'f5c40b11-75f1-482f-aa80-8e412c228ea5', 'serial': 'f5c40b11-75f1-482f-aa80-8e412c228ea5'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1204.931625] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.725s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.954355] env[63538]: INFO nova.scheduler.client.report [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleted allocations for instance 048573b4-26db-4a62-81e0-1bc1c3999d02 [ 1205.190919] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101908, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.628235} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.191304] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] df85b1e1-0319-4619-8680-73bb5d413595/df85b1e1-0319-4619-8680-73bb5d413595.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1205.191571] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1205.191739] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b9b664d-e097-4579-9045-44843ed16b13 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.200601] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1205.200601] env[63538]: value = "task-5101912" [ 1205.200601] env[63538]: _type = "Task" [ 1205.200601] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.209874] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101912, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.226328] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.226673] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.226908] env[63538]: DEBUG nova.objects.instance [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lazy-loading 'resources' on Instance uuid 6a8de8d2-aa15-4057-a936-57cad9c8b1d0 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.311863] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.362274] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101911, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.461751] env[63538]: DEBUG oslo_concurrency.lockutils [None req-58e501d2-6a6b-4ae6-a7f8-ce2bb3759211 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "048573b4-26db-4a62-81e0-1bc1c3999d02" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.391s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.711590] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101912, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081347} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.711940] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1205.713548] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebeaa10e-cd69-4109-938e-d7f3e09a79b1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.733408] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] df85b1e1-0319-4619-8680-73bb5d413595/df85b1e1-0319-4619-8680-73bb5d413595.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1205.736357] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61736f32-35d6-46e4-9a0e-f5bcbd78000a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.757484] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1205.757484] env[63538]: value = "task-5101913" [ 1205.757484] env[63538]: _type = "Task" [ 1205.757484] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.767162] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101913, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.868996] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101911, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.881530] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3502a1af-f16e-46fe-b0a9-ac86ed5ef93d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.894617] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebb3088-e5e6-4436-ad1d-8244735c43da {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.948741] env[63538]: DEBUG nova.objects.instance [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lazy-loading 'flavor' on Instance uuid e3feec17-ca1b-4873-bb0a-370c3868aabf {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.952018] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0b71fb-7f6c-4fc4-bba5-c06e975d66e7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.961764] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b714938-ee72-4fee-9b91-b61de7ec920c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.978400] env[63538]: DEBUG nova.compute.provider_tree [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1206.268299] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101913, 'name': ReconfigVM_Task, 'duration_secs': 0.34983} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.268645] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Reconfigured VM instance instance-00000075 to attach disk [datastore2] df85b1e1-0319-4619-8680-73bb5d413595/df85b1e1-0319-4619-8680-73bb5d413595.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1206.269381] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d366287e-7a87-4f8c-9915-e17c7d3dce1f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.277453] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1206.277453] env[63538]: value = "task-5101914" [ 1206.277453] env[63538]: _type = "Task" [ 1206.277453] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.286933] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101914, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.361615] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101911, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.457137] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6e9e73a5-2aa9-4abb-99ed-652ea9d130b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.300s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.481789] env[63538]: DEBUG nova.scheduler.client.report [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1206.621471] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "f0183c1f-4557-45fd-ba65-4821ef661173" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.621820] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "f0183c1f-4557-45fd-ba65-4821ef661173" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.728958] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "e93aab2e-f8c4-4959-923f-0449a84108d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.729445] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 
tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "e93aab2e-f8c4-4959-923f-0449a84108d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.788209] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101914, 'name': Rename_Task, 'duration_secs': 0.151825} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.788550] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1206.788817] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5775810-a56f-41e1-9e37-6f5f8bb04434 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.797182] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1206.797182] env[63538]: value = "task-5101915" [ 1206.797182] env[63538]: _type = "Task" [ 1206.797182] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.810610] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101915, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.865046] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101911, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.986459] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.760s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.989118] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.677s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.990812] env[63538]: INFO nova.compute.claims [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1207.018076] env[63538]: INFO nova.scheduler.client.report [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Deleted allocations for instance 6a8de8d2-aa15-4057-a936-57cad9c8b1d0 [ 1207.125641] env[63538]: DEBUG nova.compute.utils [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1207.231935] env[63538]: DEBUG nova.compute.manager [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1207.308935] env[63538]: DEBUG oslo_vmware.api [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101915, 'name': PowerOnVM_Task, 'duration_secs': 0.495615} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.309326] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1207.309637] env[63538]: DEBUG nova.compute.manager [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1207.310523] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669a858e-4fa2-4767-bb40-880447da61d5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.362380] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101911, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.529367] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b619e328-fdbf-4c03-84f5-50649d7f1315 tempest-ServerShowV247Test-1654296754 tempest-ServerShowV247Test-1654296754-project-member] Lock "6a8de8d2-aa15-4057-a936-57cad9c8b1d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.010s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.628844] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "f0183c1f-4557-45fd-ba65-4821ef661173" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.751272] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.827956] env[63538]: DEBUG oslo_concurrency.lockutils [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.866758] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101911, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.522439} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.867031] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/fc39aa85-5f54-44b2-83e7-fcf99170aec7/fc39aa85-5f54-44b2-83e7-fcf99170aec7.vmdk to [datastore1] d00151c1-ca34-4c57-9ed2-74d506a0cffb/d00151c1-ca34-4c57-9ed2-74d506a0cffb.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1207.867891] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beac77fd-d82e-47af-a5c7-e1e66a9efb31 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.891371] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] d00151c1-ca34-4c57-9ed2-74d506a0cffb/d00151c1-ca34-4c57-9ed2-74d506a0cffb.vmdk or device None with type streamOptimized {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1207.891645] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b42297d2-6e7e-4bd8-b1c7-9d158535193f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.914410] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1207.914410] env[63538]: value = "task-5101916" [ 1207.914410] env[63538]: _type = "Task" [ 1207.914410] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.924191] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101916, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.094068] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "c1766d8e-7949-4fa8-a762-007d016a4de1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.094324] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "c1766d8e-7949-4fa8-a762-007d016a4de1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.114877] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b387c13-8385-4319-bba8-4932414ac292 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.123664] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa02355-5725-4044-a4ce-386010711bb1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.156959] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112cdbd5-3a90-4d71-8251-f137e59c903f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.165333] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c92ec7-276c-4a02-a801-87f81a8b801d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.179522] env[63538]: DEBUG nova.compute.provider_tree [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1208.312233] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquiring lock "df85b1e1-0319-4619-8680-73bb5d413595" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.312233] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Lock "df85b1e1-0319-4619-8680-73bb5d413595" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.312233] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bb3409cd-8502-4f18-aec7-94356a316e2b 
tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquiring lock "df85b1e1-0319-4619-8680-73bb5d413595-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.312233] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Lock "df85b1e1-0319-4619-8680-73bb5d413595-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.312233] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Lock "df85b1e1-0319-4619-8680-73bb5d413595-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.314343] env[63538]: INFO nova.compute.manager [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Terminating instance [ 1208.316286] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquiring lock "refresh_cache-df85b1e1-0319-4619-8680-73bb5d413595" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1208.316461] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquired lock "refresh_cache-df85b1e1-0319-4619-8680-73bb5d413595" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.316632] env[63538]: DEBUG nova.network.neutron [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1208.424508] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101916, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.596667] env[63538]: DEBUG nova.compute.manager [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1208.683242] env[63538]: DEBUG nova.scheduler.client.report [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1208.708612] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "f0183c1f-4557-45fd-ba65-4821ef661173" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.708948] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "f0183c1f-4557-45fd-ba65-4821ef661173" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.709312] env[63538]: INFO nova.compute.manager [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Attaching volume 8dce72fb-6084-4a8a-8e72-83746e434be6 to /dev/sdb [ 1208.741940] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64d85fd-5187-4414-8b5f-6abb802ec08f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.749913] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a2ce64-4d7d-4621-a5ef-d193527612b6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.763351] env[63538]: DEBUG nova.virt.block_device [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating existing volume attachment record: 997f153b-3a2b-4bfa-badc-2e729ceeccf1 {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1208.835986] env[63538]: DEBUG nova.network.neutron [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1208.883827] env[63538]: DEBUG nova.network.neutron [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.925341] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101916, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.130167] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.188090] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.199s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.188648] env[63538]: DEBUG nova.compute.manager [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1209.191597] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.440s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.193252] env[63538]: INFO nova.compute.claims [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1209.386792] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Releasing lock "refresh_cache-df85b1e1-0319-4619-8680-73bb5d413595" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.387344] env[63538]: DEBUG nova.compute.manager [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1209.387581] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1209.388561] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78bb852-f3fb-4f37-baf7-fc4e84f0450f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.397495] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1209.397715] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9bee5a92-24ae-4f20-8957-4c30121a11e5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.404957] env[63538]: DEBUG oslo_vmware.api [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1209.404957] env[63538]: value = "task-5101920" [ 1209.404957] env[63538]: _type = "Task" [ 1209.404957] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.413593] env[63538]: DEBUG oslo_vmware.api [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101920, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.426161] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101916, 'name': ReconfigVM_Task, 'duration_secs': 1.19602} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.426492] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Reconfigured VM instance instance-0000006a to attach disk [datastore1] d00151c1-ca34-4c57-9ed2-74d506a0cffb/d00151c1-ca34-4c57-9ed2-74d506a0cffb.vmdk or device None with type streamOptimized {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1209.427239] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93f3bf8e-44cc-4dee-881a-92334062c18a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.434643] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1209.434643] env[63538]: value = "task-5101921" [ 1209.434643] env[63538]: _type = "Task" [ 1209.434643] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.444069] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101921, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.698613] env[63538]: DEBUG nova.compute.utils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1209.702822] env[63538]: DEBUG nova.compute.manager [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1209.703102] env[63538]: DEBUG nova.network.neutron [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1209.759034] env[63538]: DEBUG nova.policy [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd6de144ccc4498aa90ae01ca7a0f6f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d6954a5254f441ca256c85330297cef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1209.916815] env[63538]: DEBUG oslo_vmware.api [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101920, 'name': PowerOffVM_Task, 'duration_secs': 0.109157} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.917257] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1209.917394] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1209.917591] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34529a7d-d51f-4bef-bbd2-745ca26f431e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.942779] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1209.943064] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1209.943195] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Deleting the datastore file [datastore2] df85b1e1-0319-4619-8680-73bb5d413595 
{{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1209.943485] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20203801-da7b-4f77-89d6-1925ec3e1e69 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.948960] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101921, 'name': Rename_Task, 'duration_secs': 0.173459} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.949689] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1209.949942] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f76d1e2-29ea-49df-a519-bdf23a74c8d3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.955161] env[63538]: DEBUG oslo_vmware.api [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for the task: (returnval){ [ 1209.955161] env[63538]: value = "task-5101923" [ 1209.955161] env[63538]: _type = "Task" [ 1209.955161] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.959960] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1209.959960] env[63538]: value = "task-5101924" [ 1209.959960] env[63538]: _type = "Task" [ 1209.959960] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.974172] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101924, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.977569] env[63538]: DEBUG oslo_vmware.api [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101923, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.078379] env[63538]: DEBUG nova.network.neutron [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Successfully created port: 3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1210.203656] env[63538]: DEBUG nova.compute.manager [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1210.329807] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41adb1a6-74cd-40ed-80ed-6007f344e781 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.339228] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9745ef-d5e9-4649-9aca-9cf854cce176 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.370629] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df734c99-a2c9-4d50-8993-e5dfcb29ca6c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.379252] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31ec858-45ec-4ad0-87f9-2455b87b08f6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.394251] env[63538]: DEBUG nova.compute.provider_tree [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1210.470193] env[63538]: DEBUG oslo_vmware.api [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Task: {'id': task-5101923, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.118274} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.473129] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1210.473342] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1210.473538] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1210.473713] env[63538]: INFO nova.compute.manager [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1210.473962] env[63538]: DEBUG oslo.service.loopingcall [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1210.474198] env[63538]: DEBUG oslo_vmware.api [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101924, 'name': PowerOnVM_Task, 'duration_secs': 0.490094} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.474718] env[63538]: DEBUG nova.compute.manager [-] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1210.474818] env[63538]: DEBUG nova.network.neutron [-] [instance: df85b1e1-0319-4619-8680-73bb5d413595] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1210.476529] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1210.494685] env[63538]: DEBUG nova.network.neutron [-] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1210.581673] env[63538]: DEBUG nova.compute.manager [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1210.582633] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44bfaad-fde9-474b-8662-41655c6aa87d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.900659] env[63538]: DEBUG nova.scheduler.client.report [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1210.996996] env[63538]: DEBUG nova.network.neutron [-] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.102255] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3c37cbd-d363-4b64-9091-4ea308ea3fb6 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 25.479s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.103534] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 28.729s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.103819] env[63538]: INFO nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] During sync_power_state the instance has a pending task (unshelving). Skip. [ 1211.104127] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.216230] env[63538]: DEBUG nova.compute.manager [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1211.261371] env[63538]: DEBUG nova.virt.hardware [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1211.261639] env[63538]: DEBUG nova.virt.hardware [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1211.261806] env[63538]: DEBUG nova.virt.hardware [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1211.261997] env[63538]: DEBUG nova.virt.hardware [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1211.262169] env[63538]: DEBUG nova.virt.hardware [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1211.262325] env[63538]: DEBUG nova.virt.hardware [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1211.262543] env[63538]: DEBUG nova.virt.hardware [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1211.262710] env[63538]: DEBUG nova.virt.hardware [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1211.262884] env[63538]: DEBUG nova.virt.hardware [None 
req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1211.263083] env[63538]: DEBUG nova.virt.hardware [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1211.263290] env[63538]: DEBUG nova.virt.hardware [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1211.264208] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea46a0a5-58d3-4386-ab61-953ae40ee3e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.273354] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a5389e-04dd-4a82-85a4-47ce22a66180 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.407347] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.216s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.407898] env[63538]: DEBUG nova.compute.manager [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1211.411040] env[63538]: DEBUG oslo_concurrency.lockutils [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 3.583s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.411252] env[63538]: DEBUG nova.objects.instance [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63538) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1211.500124] env[63538]: INFO nova.compute.manager [-] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Took 1.03 seconds to deallocate network for instance. 
[ 1211.558784] env[63538]: DEBUG nova.compute.manager [req-90cc3d11-fea2-444d-8aef-d4af36326b40 req-4ceb9a84-f737-40ce-b8ef-b9f165226503 service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Received event network-vif-plugged-3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1211.559018] env[63538]: DEBUG oslo_concurrency.lockutils [req-90cc3d11-fea2-444d-8aef-d4af36326b40 req-4ceb9a84-f737-40ce-b8ef-b9f165226503 service nova] Acquiring lock "057f192d-b470-4683-b197-913457d10717-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.559259] env[63538]: DEBUG oslo_concurrency.lockutils [req-90cc3d11-fea2-444d-8aef-d4af36326b40 req-4ceb9a84-f737-40ce-b8ef-b9f165226503 service nova] Lock "057f192d-b470-4683-b197-913457d10717-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.559433] env[63538]: DEBUG oslo_concurrency.lockutils [req-90cc3d11-fea2-444d-8aef-d4af36326b40 req-4ceb9a84-f737-40ce-b8ef-b9f165226503 service nova] Lock "057f192d-b470-4683-b197-913457d10717-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.559607] env[63538]: DEBUG nova.compute.manager [req-90cc3d11-fea2-444d-8aef-d4af36326b40 req-4ceb9a84-f737-40ce-b8ef-b9f165226503 service nova] [instance: 057f192d-b470-4683-b197-913457d10717] No waiting events found dispatching network-vif-plugged-3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1211.559779] env[63538]: WARNING nova.compute.manager [req-90cc3d11-fea2-444d-8aef-d4af36326b40 req-4ceb9a84-f737-40ce-b8ef-b9f165226503 service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Received unexpected event network-vif-plugged-3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1 for instance with vm_state building and task_state spawning. [ 1211.649526] env[63538]: DEBUG nova.network.neutron [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Successfully updated port: 3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1211.919363] env[63538]: DEBUG nova.compute.utils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1211.921104] env[63538]: DEBUG nova.compute.manager [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1211.921279] env[63538]: DEBUG nova.network.neutron [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1211.962397] env[63538]: DEBUG nova.policy [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f315670d336b49d6a732297656ce515a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df090f9a727d4cf4a0f466e27928bdc6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1212.008518] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.152952] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1212.152952] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.152952] env[63538]: DEBUG nova.network.neutron [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1212.258393] env[63538]: DEBUG nova.network.neutron [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Successfully created port: 056940fe-0d12-4a3b-a2be-582e970e06bf {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1212.422259] env[63538]: DEBUG oslo_concurrency.lockutils [None req-441476b5-877c-404a-ad42-30f38c079690 tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.423924] 
env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.294s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.425650] env[63538]: INFO nova.compute.claims [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1212.428751] env[63538]: DEBUG nova.compute.manager [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1212.684713] env[63538]: DEBUG nova.network.neutron [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1212.837406] env[63538]: DEBUG nova.network.neutron [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Updating instance_info_cache with network_info: [{"id": "3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1", "address": "fa:16:3e:73:08:09", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cef0125-1c", "ovs_interfaceid": "3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.337726] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1213.338220] env[63538]: DEBUG nova.compute.manager [None req-1a89eb8a-5523-415f-8e84-a9477404594e 
tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Instance network_info: |[{"id": "3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1", "address": "fa:16:3e:73:08:09", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cef0125-1c", "ovs_interfaceid": "3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1213.338613] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:08:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f39e3b37-7906-4bbc-820e-ceac74e4d827', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1213.346694] env[63538]: DEBUG oslo.service.loopingcall [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1213.346976] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 057f192d-b470-4683-b197-913457d10717] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1213.347378] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-982256a7-db68-42df-b6bb-77107b4fee2a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.368123] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1213.368123] env[63538]: value = "task-5101926" [ 1213.368123] env[63538]: _type = "Task" [ 1213.368123] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.376512] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101926, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.442597] env[63538]: DEBUG nova.compute.manager [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1213.473757] env[63538]: DEBUG nova.virt.hardware [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1213.474063] env[63538]: DEBUG nova.virt.hardware [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1213.474337] env[63538]: DEBUG nova.virt.hardware [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1213.474510] env[63538]: DEBUG nova.virt.hardware [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1213.474678] env[63538]: DEBUG nova.virt.hardware [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1213.474849] env[63538]: DEBUG nova.virt.hardware [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1213.475253] env[63538]: DEBUG nova.virt.hardware [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1213.475373] env[63538]: DEBUG nova.virt.hardware [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1213.475582] env[63538]: DEBUG nova.virt.hardware [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1213.475760] env[63538]: DEBUG nova.virt.hardware [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1213.475942] env[63538]: DEBUG nova.virt.hardware [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1213.476822] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df37edfe-eb25-4fa6-9328-05e90b7dc4c6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.488217] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c5781a-d9b1-444e-838d-7ed5f4ef0177 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.576318] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b980da-d8b8-4558-badc-5ab411491cb2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.584987] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9edb32-c8a8-496d-b790-d2df6305fe4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.623233] env[63538]: DEBUG nova.compute.manager [req-2784de01-84cc-4344-9a28-a6b2fb0f9763 req-f8905b54-ebbf-4975-8059-0aa3a06a9479 service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Received event network-changed-3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1213.623440] env[63538]: DEBUG nova.compute.manager [req-2784de01-84cc-4344-9a28-a6b2fb0f9763 req-f8905b54-ebbf-4975-8059-0aa3a06a9479 service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Refreshing instance network info cache due to event network-changed-3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1213.623653] env[63538]: DEBUG oslo_concurrency.lockutils [req-2784de01-84cc-4344-9a28-a6b2fb0f9763 req-f8905b54-ebbf-4975-8059-0aa3a06a9479 service nova] Acquiring lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1213.623808] env[63538]: DEBUG oslo_concurrency.lockutils [req-2784de01-84cc-4344-9a28-a6b2fb0f9763 req-f8905b54-ebbf-4975-8059-0aa3a06a9479 service nova] Acquired lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.623976] env[63538]: DEBUG nova.network.neutron [req-2784de01-84cc-4344-9a28-a6b2fb0f9763 req-f8905b54-ebbf-4975-8059-0aa3a06a9479 service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Refreshing network info cache for port 3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1213.625777] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c164b93-03c5-4b03-aa05-2764a75d0741 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.636496] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86eeaa4-b19c-4fd8-9e8f-9250bad0a396 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.653006] env[63538]: DEBUG nova.compute.provider_tree [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1213.810461] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Volume attach. 
Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1213.810790] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992554', 'volume_id': '8dce72fb-6084-4a8a-8e72-83746e434be6', 'name': 'volume-8dce72fb-6084-4a8a-8e72-83746e434be6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f0183c1f-4557-45fd-ba65-4821ef661173', 'attached_at': '', 'detached_at': '', 'volume_id': '8dce72fb-6084-4a8a-8e72-83746e434be6', 'serial': '8dce72fb-6084-4a8a-8e72-83746e434be6'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1213.811988] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d5f4b9-b64c-4697-acca-ee5df221f5b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.816509] env[63538]: DEBUG nova.network.neutron [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Successfully updated port: 056940fe-0d12-4a3b-a2be-582e970e06bf {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1213.830713] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30479e5a-06d9-46db-97dc-d5c93ddf1ef9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.856258] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-8dce72fb-6084-4a8a-8e72-83746e434be6/volume-8dce72fb-6084-4a8a-8e72-83746e434be6.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1213.856572] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-841a28f5-4619-4366-8542-ecb82ca24950 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.878239] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101926, 'name': CreateVM_Task, 'duration_secs': 0.380631} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.879378] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 057f192d-b470-4683-b197-913457d10717] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1213.879738] env[63538]: DEBUG oslo_vmware.api [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1213.879738] env[63538]: value = "task-5101927" [ 1213.879738] env[63538]: _type = "Task" [ 1213.879738] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.880422] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1213.880597] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.880957] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1213.881326] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d58d189-0ef1-4da2-9699-416efa89f7b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.891877] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1213.891877] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523b894e-ba7a-c002-098c-d78480cfcc42" [ 1213.891877] env[63538]: _type = "Task" [ 1213.891877] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.895323] env[63538]: DEBUG oslo_vmware.api [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101927, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.904573] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523b894e-ba7a-c002-098c-d78480cfcc42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.156588] env[63538]: DEBUG nova.scheduler.client.report [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1214.330278] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "refresh_cache-e93aab2e-f8c4-4959-923f-0449a84108d6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1214.330479] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "refresh_cache-e93aab2e-f8c4-4959-923f-0449a84108d6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.330661] env[63538]: DEBUG nova.network.neutron [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1214.373144] env[63538]: DEBUG nova.network.neutron [req-2784de01-84cc-4344-9a28-a6b2fb0f9763 req-f8905b54-ebbf-4975-8059-0aa3a06a9479 service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Updated VIF entry in instance network info cache for port 3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1214.373530] env[63538]: DEBUG nova.network.neutron [req-2784de01-84cc-4344-9a28-a6b2fb0f9763 req-f8905b54-ebbf-4975-8059-0aa3a06a9479 service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Updating instance_info_cache with network_info: [{"id": "3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1", "address": "fa:16:3e:73:08:09", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cef0125-1c", "ovs_interfaceid": "3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.392309] env[63538]: DEBUG oslo_vmware.api [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101927, 'name': ReconfigVM_Task, 'duration_secs': 0.385978} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.392603] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-8dce72fb-6084-4a8a-8e72-83746e434be6/volume-8dce72fb-6084-4a8a-8e72-83746e434be6.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1214.397791] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3b86d68-67d6-4246-89d5-597613de1fbc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.417647] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523b894e-ba7a-c002-098c-d78480cfcc42, 'name': SearchDatastore_Task, 'duration_secs': 0.012184} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.419063] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1214.419327] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1214.419580] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1214.419735] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.419922] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1214.420298] env[63538]: DEBUG oslo_vmware.api [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1214.420298] env[63538]: value = "task-5101928" [ 1214.420298] env[63538]: _type = "Task" [ 1214.420298] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.420475] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7338daa2-e44d-4955-b1e4-504236205617 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.431617] env[63538]: DEBUG oslo_vmware.api [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101928, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.433365] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1214.433618] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1214.434369] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8da61582-aeea-4aec-bc02-4727679e7972 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.441615] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1214.441615] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e3aa28-12ba-8f60-e26f-0878986c5c7a" [ 1214.441615] env[63538]: _type = "Task" [ 1214.441615] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.451866] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e3aa28-12ba-8f60-e26f-0878986c5c7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.663717] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.664279] env[63538]: DEBUG nova.compute.manager [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Start building networks asynchronously for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1214.667081] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.659s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.667361] env[63538]: DEBUG nova.objects.instance [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Lazy-loading 'resources' on Instance uuid df85b1e1-0319-4619-8680-73bb5d413595 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1214.862757] env[63538]: DEBUG nova.network.neutron [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1214.876907] env[63538]: DEBUG oslo_concurrency.lockutils [req-2784de01-84cc-4344-9a28-a6b2fb0f9763 req-f8905b54-ebbf-4975-8059-0aa3a06a9479 service nova] Releasing lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1214.934136] env[63538]: DEBUG oslo_vmware.api [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101928, 'name': ReconfigVM_Task, 'duration_secs': 0.155534} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.934455] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992554', 'volume_id': '8dce72fb-6084-4a8a-8e72-83746e434be6', 'name': 'volume-8dce72fb-6084-4a8a-8e72-83746e434be6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f0183c1f-4557-45fd-ba65-4821ef661173', 'attached_at': '', 'detached_at': '', 'volume_id': '8dce72fb-6084-4a8a-8e72-83746e434be6', 'serial': '8dce72fb-6084-4a8a-8e72-83746e434be6'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1214.951707] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e3aa28-12ba-8f60-e26f-0878986c5c7a, 'name': SearchDatastore_Task, 'duration_secs': 0.010862} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.952456] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94849bdc-717d-4b62-8aa3-08c58323a4ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.958992] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1214.958992] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5220a3c5-2de6-0a96-ec4e-4cfc22339ade" [ 1214.958992] env[63538]: _type = "Task" [ 1214.958992] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.971350] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5220a3c5-2de6-0a96-ec4e-4cfc22339ade, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.997648] env[63538]: DEBUG nova.network.neutron [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Updating instance_info_cache with network_info: [{"id": "056940fe-0d12-4a3b-a2be-582e970e06bf", "address": "fa:16:3e:0f:03:e4", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap056940fe-0d", "ovs_interfaceid": "056940fe-0d12-4a3b-a2be-582e970e06bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.170112] env[63538]: DEBUG nova.compute.utils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1215.174426] env[63538]: DEBUG nova.compute.manager [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1215.174566] env[63538]: DEBUG nova.network.neutron [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1215.225843] env[63538]: DEBUG nova.policy [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb444448a4d64c5e8ec9613ed633a527', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9b1eba931f144b94b6e186dac1310dfa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1215.286351] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a144f3-18fa-496f-98ad-6c36a722e0e2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.294477] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6767f81e-ce44-4d5c-ad52-f08fcbedeebe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.326480] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6f55a6-a276-4872-881c-c5e26c50afe4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.334963] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5ad00f-dc4c-4b83-832b-f13927016753 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.349460] env[63538]: DEBUG nova.compute.provider_tree [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1215.470184] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5220a3c5-2de6-0a96-ec4e-4cfc22339ade, 'name': SearchDatastore_Task, 'duration_secs': 0.010881} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.470391] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1215.470568] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 057f192d-b470-4683-b197-913457d10717/057f192d-b470-4683-b197-913457d10717.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1215.470852] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09f475d4-14f9-4bfc-8455-cc9cf007608b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.478602] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1215.478602] env[63538]: value = "task-5101929" [ 1215.478602] env[63538]: _type = "Task" [ 1215.478602] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.489415] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101929, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.500447] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "refresh_cache-e93aab2e-f8c4-4959-923f-0449a84108d6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1215.501545] env[63538]: DEBUG nova.compute.manager [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Instance network_info: |[{"id": "056940fe-0d12-4a3b-a2be-582e970e06bf", "address": "fa:16:3e:0f:03:e4", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap056940fe-0d", "ovs_interfaceid": "056940fe-0d12-4a3b-a2be-582e970e06bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1215.501545] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:03:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '056940fe-0d12-4a3b-a2be-582e970e06bf', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1215.510235] env[63538]: DEBUG oslo.service.loopingcall [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1215.512206] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1215.512206] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3286d55-42dd-4b01-a245-9ae3a1d90f5b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.528146] env[63538]: DEBUG nova.network.neutron [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Successfully created port: 632e844e-c3bd-4596-8708-86ed6e0abef8 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1215.537157] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1215.537157] env[63538]: value = "task-5101930" [ 1215.537157] env[63538]: _type = "Task" [ 1215.537157] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.547072] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101930, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.630436] env[63538]: DEBUG nova.compute.manager [req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Received event network-vif-plugged-056940fe-0d12-4a3b-a2be-582e970e06bf {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1215.630738] env[63538]: DEBUG oslo_concurrency.lockutils [req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] Acquiring lock "e93aab2e-f8c4-4959-923f-0449a84108d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.631187] env[63538]: DEBUG oslo_concurrency.lockutils [req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] Lock "e93aab2e-f8c4-4959-923f-0449a84108d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.631561] env[63538]: DEBUG oslo_concurrency.lockutils [req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] Lock "e93aab2e-f8c4-4959-923f-0449a84108d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.631832] env[63538]: DEBUG nova.compute.manager [req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] No waiting events found dispatching network-vif-plugged-056940fe-0d12-4a3b-a2be-582e970e06bf {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1215.632087] env[63538]: WARNING nova.compute.manager 
[req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Received unexpected event network-vif-plugged-056940fe-0d12-4a3b-a2be-582e970e06bf for instance with vm_state building and task_state spawning. [ 1215.632599] env[63538]: DEBUG nova.compute.manager [req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Received event network-changed-056940fe-0d12-4a3b-a2be-582e970e06bf {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1215.632859] env[63538]: DEBUG nova.compute.manager [req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Refreshing instance network info cache due to event network-changed-056940fe-0d12-4a3b-a2be-582e970e06bf. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1215.633145] env[63538]: DEBUG oslo_concurrency.lockutils [req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] Acquiring lock "refresh_cache-e93aab2e-f8c4-4959-923f-0449a84108d6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1215.633377] env[63538]: DEBUG oslo_concurrency.lockutils [req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] Acquired lock "refresh_cache-e93aab2e-f8c4-4959-923f-0449a84108d6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.633615] env[63538]: DEBUG nova.network.neutron [req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Refreshing network info cache for port 056940fe-0d12-4a3b-a2be-582e970e06bf {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1215.678175] env[63538]: DEBUG nova.compute.manager [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1215.852954] env[63538]: DEBUG nova.scheduler.client.report [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1215.989655] env[63538]: DEBUG nova.objects.instance [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'flavor' on Instance uuid f0183c1f-4557-45fd-ba65-4821ef661173 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1215.991139] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101929, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492141} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.991555] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 057f192d-b470-4683-b197-913457d10717/057f192d-b470-4683-b197-913457d10717.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1215.992246] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1215.992246] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fd672b25-4bd8-4d9f-a342-f111a6318389 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.999623] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1215.999623] env[63538]: value = "task-5101931" [ 1215.999623] env[63538]: _type = "Task" [ 1215.999623] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.011481] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101931, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.048089] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101930, 'name': CreateVM_Task, 'duration_secs': 0.375915} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.048319] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1216.049115] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.049301] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.049636] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1216.049901] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25f18303-d32c-4090-9718-f7f182624e36 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.055338] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1216.055338] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5247af84-1f32-33d9-8689-87e147fa2109" [ 1216.055338] env[63538]: _type = "Task" [ 1216.055338] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.064560] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5247af84-1f32-33d9-8689-87e147fa2109, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.358666] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.691s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.380125] env[63538]: INFO nova.scheduler.client.report [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Deleted allocations for instance df85b1e1-0319-4619-8680-73bb5d413595 [ 1216.421824] env[63538]: DEBUG nova.network.neutron [req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Updated VIF entry in instance network info cache for port 056940fe-0d12-4a3b-a2be-582e970e06bf. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1216.422212] env[63538]: DEBUG nova.network.neutron [req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Updating instance_info_cache with network_info: [{"id": "056940fe-0d12-4a3b-a2be-582e970e06bf", "address": "fa:16:3e:0f:03:e4", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap056940fe-0d", "ovs_interfaceid": "056940fe-0d12-4a3b-a2be-582e970e06bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.497326] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e40cca11-e6ad-4cce-b257-b51ab72862bc tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "f0183c1f-4557-45fd-ba65-4821ef661173" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.788s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.510440] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101931, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067955} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.511668] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1216.512578] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03cbbeb-19de-4df6-abb0-be0b4a58c6b7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.538353] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 057f192d-b470-4683-b197-913457d10717/057f192d-b470-4683-b197-913457d10717.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1216.539135] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70c95e44-6c52-41b3-90ce-e6e541d9160b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.562208] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1216.562208] env[63538]: value = "task-5101932" [ 1216.562208] env[63538]: _type = "Task" [ 1216.562208] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.566651] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5247af84-1f32-33d9-8689-87e147fa2109, 'name': SearchDatastore_Task, 'duration_secs': 0.01093} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.570425] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1216.570703] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1216.570950] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.571123] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.571310] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1216.571633] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9e5b3a3-b86a-47d9-b092-6651ee12fa93 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.580083] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101932, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.583183] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1216.583381] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1216.584144] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c923051-c4ee-4a8e-a116-cf196ea0f989 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.590455] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1216.590455] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526cf45f-b17f-2df8-94f4-861640d9c587" [ 1216.590455] env[63538]: _type = "Task" [ 1216.590455] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.600978] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526cf45f-b17f-2df8-94f4-861640d9c587, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.688364] env[63538]: DEBUG nova.compute.manager [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1216.715273] env[63538]: DEBUG nova.virt.hardware [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1216.715576] env[63538]: DEBUG nova.virt.hardware [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1216.715751] env[63538]: DEBUG nova.virt.hardware [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1216.715944] env[63538]: DEBUG nova.virt.hardware [None req-35ac8b08-9435-48e8-930f-9ba89a52858e 
tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1216.716117] env[63538]: DEBUG nova.virt.hardware [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1216.716282] env[63538]: DEBUG nova.virt.hardware [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1216.716503] env[63538]: DEBUG nova.virt.hardware [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1216.716665] env[63538]: DEBUG nova.virt.hardware [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1216.716835] env[63538]: DEBUG nova.virt.hardware [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1216.717018] env[63538]: DEBUG nova.virt.hardware [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1216.717275] env[63538]: DEBUG nova.virt.hardware [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1216.718184] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366220bd-0b97-456d-a9c2-88e127819aa3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.726882] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0196fc28-12db-4046-8360-d04651bc66b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.891307] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bb3409cd-8502-4f18-aec7-94356a316e2b tempest-ServerShowV257Test-809447213 tempest-ServerShowV257Test-809447213-project-member] Lock "df85b1e1-0319-4619-8680-73bb5d413595" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.580s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.925081] env[63538]: DEBUG oslo_concurrency.lockutils [req-c526ddbf-2ab6-4c40-9eb9-f010440ab669 req-81474e1a-e598-436c-9bc5-5b910413b44d service nova] Releasing lock "refresh_cache-e93aab2e-f8c4-4959-923f-0449a84108d6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1216.950971] env[63538]: DEBUG nova.compute.manager [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Stashing vm_state: active {{(pid=63538) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 1217.078913] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101932, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.101108] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]526cf45f-b17f-2df8-94f4-861640d9c587, 'name': SearchDatastore_Task, 'duration_secs': 0.011977} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.101985] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8912acbd-e5ce-46f7-929d-7eb2a79d9db2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.108885] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1217.108885] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e459fd-7c24-74ce-1640-c71d645b2aa7" [ 1217.108885] env[63538]: _type = "Task" [ 1217.108885] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.120938] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e459fd-7c24-74ce-1640-c71d645b2aa7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.238608] env[63538]: DEBUG nova.network.neutron [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Successfully updated port: 632e844e-c3bd-4596-8708-86ed6e0abef8 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1217.472937] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.473324] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.577785] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101932, 'name': ReconfigVM_Task, 'duration_secs': 1.006068} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.578224] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 057f192d-b470-4683-b197-913457d10717/057f192d-b470-4683-b197-913457d10717.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1217.578876] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-838b1162-b31b-4481-af3d-889d23bb0af6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.587431] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1217.587431] env[63538]: value = "task-5101933" [ 1217.587431] env[63538]: _type = "Task" [ 1217.587431] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.596622] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101933, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.621904] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52e459fd-7c24-74ce-1640-c71d645b2aa7, 'name': SearchDatastore_Task, 'duration_secs': 0.012414} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.622198] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1217.622465] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] e93aab2e-f8c4-4959-923f-0449a84108d6/e93aab2e-f8c4-4959-923f-0449a84108d6.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1217.622733] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed2e354a-5845-4914-9be6-89044292100e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.630612] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1217.630612] env[63538]: value = "task-5101934" [ 1217.630612] env[63538]: _type = "Task" [ 1217.630612] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.639079] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101934, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.658148] env[63538]: DEBUG nova.compute.manager [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Received event network-vif-plugged-632e844e-c3bd-4596-8708-86ed6e0abef8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1217.658413] env[63538]: DEBUG oslo_concurrency.lockutils [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] Acquiring lock "c1766d8e-7949-4fa8-a762-007d016a4de1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.658698] env[63538]: DEBUG oslo_concurrency.lockutils [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] Lock "c1766d8e-7949-4fa8-a762-007d016a4de1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.658940] env[63538]: DEBUG oslo_concurrency.lockutils [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] Lock "c1766d8e-7949-4fa8-a762-007d016a4de1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.659216] env[63538]: DEBUG nova.compute.manager [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] No waiting events found dispatching network-vif-plugged-632e844e-c3bd-4596-8708-86ed6e0abef8 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1217.659405] env[63538]: WARNING nova.compute.manager [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Received unexpected event network-vif-plugged-632e844e-c3bd-4596-8708-86ed6e0abef8 for instance with vm_state building and task_state spawning. [ 1217.659622] env[63538]: DEBUG nova.compute.manager [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Received event network-changed-632e844e-c3bd-4596-8708-86ed6e0abef8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1217.659807] env[63538]: DEBUG nova.compute.manager [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Refreshing instance network info cache due to event network-changed-632e844e-c3bd-4596-8708-86ed6e0abef8. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1217.660048] env[63538]: DEBUG oslo_concurrency.lockutils [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] Acquiring lock "refresh_cache-c1766d8e-7949-4fa8-a762-007d016a4de1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1217.660205] env[63538]: DEBUG oslo_concurrency.lockutils [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] Acquired lock "refresh_cache-c1766d8e-7949-4fa8-a762-007d016a4de1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.660374] env[63538]: DEBUG nova.network.neutron [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Refreshing network info cache for port 632e844e-c3bd-4596-8708-86ed6e0abef8 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1217.741851] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "refresh_cache-c1766d8e-7949-4fa8-a762-007d016a4de1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1217.979362] env[63538]: INFO nova.compute.claims [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1218.099249] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101933, 'name': Rename_Task, 'duration_secs': 0.182523} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.099599] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1218.099879] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3165f64-0cc1-45b1-8bf3-ca9bfb76862c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.108612] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1218.108612] env[63538]: value = "task-5101935" [ 1218.108612] env[63538]: _type = "Task" [ 1218.108612] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.118974] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101935, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.141057] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101934, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.192960] env[63538]: DEBUG nova.network.neutron [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1218.265532] env[63538]: DEBUG nova.network.neutron [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.487068] env[63538]: INFO nova.compute.resource_tracker [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating resource usage from migration d6e3f498-ebbd-4c3e-8822-e3b13a93f7c8 [ 1218.601242] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31351b3e-be1e-40ed-b676-b95a26e870b3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.614759] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a391a070-edf5-469a-bd04-b4e41450893c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.623956] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101935, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.651928] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d4e6f4-36e3-4ea6-b3df-9483c6e23002 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.662947] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9ddddf-71c9-48bf-94da-1fb36e8b2b6e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.666816] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101934, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.677599] env[63538]: DEBUG nova.compute.provider_tree [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1218.768894] env[63538]: DEBUG oslo_concurrency.lockutils [req-264c497f-cb9b-4bde-a51b-8ca623994a04 req-17bc444b-88bb-4487-8257-1f197691a63b service nova] Releasing lock "refresh_cache-c1766d8e-7949-4fa8-a762-007d016a4de1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.769341] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired lock "refresh_cache-c1766d8e-7949-4fa8-a762-007d016a4de1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.769515] env[63538]: DEBUG nova.network.neutron [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1219.118955] env[63538]: DEBUG oslo_vmware.api [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101935, 'name': PowerOnVM_Task, 'duration_secs': 0.611064} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.119258] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1219.119476] env[63538]: INFO nova.compute.manager [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Took 7.90 seconds to spawn the instance on the hypervisor. 
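The spawn recorded above is a chain of vCenter tasks (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each driven by the same oslo.vmware polling loop that emits the "Waiting for the task", "progress is N%" and "completed successfully" lines. The snippet below is a minimal sketch of that pattern using oslo.vmware directly, not the Nova driver code itself; the vCenter host, credentials and VMDK paths are placeholders, and the datacenter MoRefs normally passed to CopyVirtualDisk_Task are omitted for brevity.

    # Minimal sketch of the oslo.vmware task-polling pattern seen in the log above.
    # Host, credentials and datastore paths are placeholders, not values from this run.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',            # vCenter host (placeholder)
        'administrator@vsphere.local',    # username (placeholder)
        'secret',                         # password (placeholder)
        api_retry_count=10,               # retries for failed API calls
        task_poll_interval=0.5)           # seconds between the "progress is N%" polls

    # Start a server-side task (here a virtual-disk copy, analogous to task-5101929)
    # and block until vCenter reports it as finished.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/image.vmdk',
        destName='[datastore1] instance-uuid/instance-uuid.vmdk')
    session.wait_for_task(task)           # emits the Waiting/progress/completed DEBUG lines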
[ 1219.119664] env[63538]: DEBUG nova.compute.manager [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1219.120485] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d4e47e-ed83-4f18-bf99-15ff0aecbce8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.158509] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101934, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.512925} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.158841] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] e93aab2e-f8c4-4959-923f-0449a84108d6/e93aab2e-f8c4-4959-923f-0449a84108d6.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1219.159162] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1219.159336] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aef30ced-f7f2-4199-a6f4-9c5c5c05302e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.166881] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1219.166881] env[63538]: value = "task-5101936" [ 1219.166881] env[63538]: _type = "Task" [ 1219.166881] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.178761] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101936, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.180722] env[63538]: DEBUG nova.scheduler.client.report [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1219.301660] env[63538]: DEBUG nova.network.neutron [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1219.468828] env[63538]: DEBUG nova.network.neutron [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Updating instance_info_cache with network_info: [{"id": "632e844e-c3bd-4596-8708-86ed6e0abef8", "address": "fa:16:3e:dc:12:67", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap632e844e-c3", "ovs_interfaceid": "632e844e-c3bd-4596-8708-86ed6e0abef8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.642155] env[63538]: INFO nova.compute.manager [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Took 14.35 seconds to build instance. [ 1219.677589] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101936, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061352} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.677876] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1219.678993] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c485507-c647-4647-bdc8-8eae5f6a86a2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.693953] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.221s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.694210] env[63538]: INFO nova.compute.manager [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Migrating [ 1219.709263] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] e93aab2e-f8c4-4959-923f-0449a84108d6/e93aab2e-f8c4-4959-923f-0449a84108d6.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1219.712710] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8401c3df-2994-4f91-98f7-605a548b1b65 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.737795] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1219.737795] env[63538]: value = "task-5101937" [ 1219.737795] env[63538]: _type = "Task" [ 1219.737795] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.748136] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101937, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.938621] env[63538]: DEBUG nova.compute.manager [req-ab444769-69c6-47bb-82fb-665a579fe8e1 req-f8abaeb2-1315-4d51-87ac-4b8d10578c1e service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Received event network-changed-3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1219.938760] env[63538]: DEBUG nova.compute.manager [req-ab444769-69c6-47bb-82fb-665a579fe8e1 req-f8abaeb2-1315-4d51-87ac-4b8d10578c1e service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Refreshing instance network info cache due to event network-changed-3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1219.938988] env[63538]: DEBUG oslo_concurrency.lockutils [req-ab444769-69c6-47bb-82fb-665a579fe8e1 req-f8abaeb2-1315-4d51-87ac-4b8d10578c1e service nova] Acquiring lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1219.939164] env[63538]: DEBUG oslo_concurrency.lockutils [req-ab444769-69c6-47bb-82fb-665a579fe8e1 req-f8abaeb2-1315-4d51-87ac-4b8d10578c1e service nova] Acquired lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.939338] env[63538]: DEBUG nova.network.neutron [req-ab444769-69c6-47bb-82fb-665a579fe8e1 req-f8abaeb2-1315-4d51-87ac-4b8d10578c1e service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Refreshing network info cache for port 3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1219.971779] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Releasing lock "refresh_cache-c1766d8e-7949-4fa8-a762-007d016a4de1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1219.972154] env[63538]: DEBUG nova.compute.manager [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Instance network_info: |[{"id": "632e844e-c3bd-4596-8708-86ed6e0abef8", "address": "fa:16:3e:dc:12:67", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap632e844e-c3", "ovs_interfaceid": "632e844e-c3bd-4596-8708-86ed6e0abef8", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1219.972587] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:12:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '632e844e-c3bd-4596-8708-86ed6e0abef8', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1219.980685] env[63538]: DEBUG oslo.service.loopingcall [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1219.981485] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1219.981739] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec603eec-e4ba-4ee8-b185-3aafef0153ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.002599] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1220.002599] env[63538]: value = "task-5101938" [ 1220.002599] env[63538]: _type = "Task" [ 1220.002599] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.010957] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101938, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.144857] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a89eb8a-5523-415f-8e84-a9477404594e tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "057f192d-b470-4683-b197-913457d10717" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.860s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.233611] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1220.233827] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.234080] env[63538]: DEBUG nova.network.neutron [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1220.248177] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101937, 'name': ReconfigVM_Task, 'duration_secs': 0.331229} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.248434] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Reconfigured VM instance instance-00000077 to attach disk [datastore1] e93aab2e-f8c4-4959-923f-0449a84108d6/e93aab2e-f8c4-4959-923f-0449a84108d6.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1220.249083] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-482da3f4-2329-43af-bb04-907b10f33a97 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.258155] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1220.258155] env[63538]: value = "task-5101939" [ 1220.258155] env[63538]: _type = "Task" [ 1220.258155] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.270240] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101939, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.516217] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101938, 'name': CreateVM_Task, 'duration_secs': 0.395929} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.516520] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1220.517349] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1220.517537] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.517942] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1220.518144] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b4e8010-231a-46d1-9855-4b0901c50129 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.523474] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1220.523474] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d620cd-f6d6-2460-51cd-c208f885dc93" [ 1220.523474] env[63538]: _type = "Task" [ 1220.523474] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.531943] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d620cd-f6d6-2460-51cd-c208f885dc93, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.655647] env[63538]: DEBUG nova.network.neutron [req-ab444769-69c6-47bb-82fb-665a579fe8e1 req-f8abaeb2-1315-4d51-87ac-4b8d10578c1e service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Updated VIF entry in instance network info cache for port 3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1220.656029] env[63538]: DEBUG nova.network.neutron [req-ab444769-69c6-47bb-82fb-665a579fe8e1 req-f8abaeb2-1315-4d51-87ac-4b8d10578c1e service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Updating instance_info_cache with network_info: [{"id": "3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1", "address": "fa:16:3e:73:08:09", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cef0125-1c", "ovs_interfaceid": "3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.768803] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101939, 'name': Rename_Task, 'duration_secs': 0.163278} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.769099] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1220.769435] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71423eb3-6a0e-46cb-ad09-9b6d88c52f28 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.776572] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1220.776572] env[63538]: value = "task-5101940" [ 1220.776572] env[63538]: _type = "Task" [ 1220.776572] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.784907] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101940, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.986599] env[63538]: DEBUG nova.network.neutron [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance_info_cache with network_info: [{"id": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "address": "fa:16:3e:68:a4:9f", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f09c892-03", "ovs_interfaceid": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.035648] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52d620cd-f6d6-2460-51cd-c208f885dc93, 'name': SearchDatastore_Task, 'duration_secs': 0.014917} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.036011] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1221.036296] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1221.036545] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1221.036704] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.036888] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1221.037194] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8d311d6-27f6-40aa-bc3a-1defba4e4eb8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.046642] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1221.046642] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1221.047378] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77f623cb-99a1-4095-acb3-9bbe041f4787 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.052965] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1221.052965] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52828c69-d578-3a86-0a8e-a928d75d0ea4" [ 1221.052965] env[63538]: _type = "Task" [ 1221.052965] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.061083] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52828c69-d578-3a86-0a8e-a928d75d0ea4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.159061] env[63538]: DEBUG oslo_concurrency.lockutils [req-ab444769-69c6-47bb-82fb-665a579fe8e1 req-f8abaeb2-1315-4d51-87ac-4b8d10578c1e service nova] Releasing lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1221.287557] env[63538]: DEBUG oslo_vmware.api [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101940, 'name': PowerOnVM_Task, 'duration_secs': 0.451984} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.287844] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1221.288030] env[63538]: INFO nova.compute.manager [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Took 7.85 seconds to spawn the instance on the hypervisor. 
[ 1221.288249] env[63538]: DEBUG nova.compute.manager [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1221.289072] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4caefa-f844-4507-93c4-deae22a45d3a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.490081] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1221.564978] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52828c69-d578-3a86-0a8e-a928d75d0ea4, 'name': SearchDatastore_Task, 'duration_secs': 0.009496} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.565722] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba0bd90f-cb6a-4b0c-8178-40665a1c20fd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.571712] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1221.571712] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524b33b4-f70b-6ebe-4b92-d5bb17f25d3d" [ 1221.571712] env[63538]: _type = "Task" [ 1221.571712] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.582026] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524b33b4-f70b-6ebe-4b92-d5bb17f25d3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.807082] env[63538]: INFO nova.compute.manager [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Took 14.07 seconds to build instance. [ 1222.083822] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]524b33b4-f70b-6ebe-4b92-d5bb17f25d3d, 'name': SearchDatastore_Task, 'duration_secs': 0.011403} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.084039] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1222.084287] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] c1766d8e-7949-4fa8-a762-007d016a4de1/c1766d8e-7949-4fa8-a762-007d016a4de1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1222.084552] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f4bea7e-e441-41cb-9f59-d3f1b19243b3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.091457] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1222.091457] env[63538]: value = "task-5101941" [ 1222.091457] env[63538]: _type = "Task" [ 1222.091457] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.099893] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101941, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.310168] env[63538]: DEBUG oslo_concurrency.lockutils [None req-477804dd-b122-4271-aeca-00630927bc12 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "e93aab2e-f8c4-4959-923f-0449a84108d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.581s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.311469] env[63538]: DEBUG nova.compute.manager [req-e063b317-f208-48d9-8fdb-856990cae575 req-6ada843f-9139-4adc-8d67-91b5d0326597 service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Received event network-changed-056940fe-0d12-4a3b-a2be-582e970e06bf {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1222.311671] env[63538]: DEBUG nova.compute.manager [req-e063b317-f208-48d9-8fdb-856990cae575 req-6ada843f-9139-4adc-8d67-91b5d0326597 service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Refreshing instance network info cache due to event network-changed-056940fe-0d12-4a3b-a2be-582e970e06bf. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1222.311898] env[63538]: DEBUG oslo_concurrency.lockutils [req-e063b317-f208-48d9-8fdb-856990cae575 req-6ada843f-9139-4adc-8d67-91b5d0326597 service nova] Acquiring lock "refresh_cache-e93aab2e-f8c4-4959-923f-0449a84108d6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1222.312064] env[63538]: DEBUG oslo_concurrency.lockutils [req-e063b317-f208-48d9-8fdb-856990cae575 req-6ada843f-9139-4adc-8d67-91b5d0326597 service nova] Acquired lock "refresh_cache-e93aab2e-f8c4-4959-923f-0449a84108d6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.312255] env[63538]: DEBUG nova.network.neutron [req-e063b317-f208-48d9-8fdb-856990cae575 req-6ada843f-9139-4adc-8d67-91b5d0326597 service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Refreshing network info cache for port 056940fe-0d12-4a3b-a2be-582e970e06bf {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1222.605618] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498162} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.605994] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] c1766d8e-7949-4fa8-a762-007d016a4de1/c1766d8e-7949-4fa8-a762-007d016a4de1.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1222.606306] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1222.606650] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8d07cef-91fa-42f3-b6d9-dc80e234c77f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.615139] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1222.615139] env[63538]: value = "task-5101942" [ 1222.615139] env[63538]: _type = "Task" [ 1222.615139] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.628796] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101942, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.005640] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b31d59-7a73-4f2f-81f7-49fab7ca6735 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.030514] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance 'f0183c1f-4557-45fd-ba65-4821ef661173' progress to 0 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1223.063162] env[63538]: DEBUG nova.network.neutron [req-e063b317-f208-48d9-8fdb-856990cae575 req-6ada843f-9139-4adc-8d67-91b5d0326597 service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Updated VIF entry in instance network info cache for port 056940fe-0d12-4a3b-a2be-582e970e06bf. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1223.065215] env[63538]: DEBUG nova.network.neutron [req-e063b317-f208-48d9-8fdb-856990cae575 req-6ada843f-9139-4adc-8d67-91b5d0326597 service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Updating instance_info_cache with network_info: [{"id": "056940fe-0d12-4a3b-a2be-582e970e06bf", "address": "fa:16:3e:0f:03:e4", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap056940fe-0d", "ovs_interfaceid": "056940fe-0d12-4a3b-a2be-582e970e06bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.126602] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101942, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.272896} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.128104] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1223.128104] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e538cce9-9b6e-4edc-aa31-6aaea10f80d4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.150768] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] c1766d8e-7949-4fa8-a762-007d016a4de1/c1766d8e-7949-4fa8-a762-007d016a4de1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1223.151381] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-589b54f2-2f31-4939-bb29-f7c7a7ef796a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.172565] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1223.172565] env[63538]: value = "task-5101943" [ 1223.172565] env[63538]: _type = "Task" [ 1223.172565] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.184341] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101943, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.536701] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1223.537058] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2bd732a-476a-434d-9572-1c61ac5051ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.544304] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1223.544304] env[63538]: value = "task-5101945" [ 1223.544304] env[63538]: _type = "Task" [ 1223.544304] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.554477] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101945, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.566391] env[63538]: DEBUG oslo_concurrency.lockutils [req-e063b317-f208-48d9-8fdb-856990cae575 req-6ada843f-9139-4adc-8d67-91b5d0326597 service nova] Releasing lock "refresh_cache-e93aab2e-f8c4-4959-923f-0449a84108d6" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1223.684536] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101943, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.056949] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101945, 'name': PowerOffVM_Task, 'duration_secs': 0.362043} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.057411] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1224.057525] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance 'f0183c1f-4557-45fd-ba65-4821ef661173' progress to 17 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1224.184634] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101943, 'name': ReconfigVM_Task, 'duration_secs': 0.846098} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.185808] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Reconfigured VM instance instance-00000078 to attach disk [datastore1] c1766d8e-7949-4fa8-a762-007d016a4de1/c1766d8e-7949-4fa8-a762-007d016a4de1.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1224.185808] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c822d2d8-501c-4b3f-9cab-6191c040f636 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.192943] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1224.192943] env[63538]: value = "task-5101946" [ 1224.192943] env[63538]: _type = "Task" [ 1224.192943] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.201425] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101946, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.565820] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1224.566112] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1224.566275] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1224.566471] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1224.566631] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1224.566789] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1224.567018] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1224.567228] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1224.567452] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1224.567639] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1224.567826] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1224.572930] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab5024d8-b63e-42c4-86cd-3a7d46c2e65e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.590611] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1224.590611] env[63538]: value = "task-5101947" [ 1224.590611] env[63538]: _type = "Task" [ 1224.590611] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.599517] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101947, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.702958] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101946, 'name': Rename_Task, 'duration_secs': 0.485582} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.703296] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1224.703636] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18dc3a66-795b-4a34-8a8f-aea74aa583ea {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.711252] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1224.711252] env[63538]: value = "task-5101948" [ 1224.711252] env[63538]: _type = "Task" [ 1224.711252] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.720280] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101948, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.100814] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101947, 'name': ReconfigVM_Task, 'duration_secs': 0.304443} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.101318] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance 'f0183c1f-4557-45fd-ba65-4821ef661173' progress to 33 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1225.222184] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101948, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.608429] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1225.608757] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1225.608942] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1225.609099] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1225.609262] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1225.609418] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1225.609670] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1225.609828] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1225.610015] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 
tempest-ServerActionsTestOtherB-1578235278-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1225.610199] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1225.610395] env[63538]: DEBUG nova.virt.hardware [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1225.615934] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1225.616333] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-116f0e8d-85d3-4cf4-83c3-47db155a2606 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.640082] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1225.640082] env[63538]: value = "task-5101949" [ 1225.640082] env[63538]: _type = "Task" [ 1225.640082] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.650118] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101949, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.723689] env[63538]: DEBUG oslo_vmware.api [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101948, 'name': PowerOnVM_Task, 'duration_secs': 0.667936} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.723951] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1225.724213] env[63538]: INFO nova.compute.manager [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Took 9.04 seconds to spawn the instance on the hypervisor. 
[ 1225.724411] env[63538]: DEBUG nova.compute.manager [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1225.725300] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaac9286-ccd0-42b0-9aaf-992049b0ebc3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.152974] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101949, 'name': ReconfigVM_Task, 'duration_secs': 0.197887} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.153369] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1226.154103] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e322a16-d302-471d-b759-8d1093e720bf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.182394] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] f0183c1f-4557-45fd-ba65-4821ef661173/f0183c1f-4557-45fd-ba65-4821ef661173.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1226.182743] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88f4f0eb-3b5c-41ae-a2db-457390448e00 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.205256] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1226.205256] env[63538]: value = "task-5101950" [ 1226.205256] env[63538]: _type = "Task" [ 1226.205256] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.215271] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101950, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.244491] env[63538]: INFO nova.compute.manager [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Took 17.14 seconds to build instance. [ 1226.717888] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.746677] env[63538]: DEBUG oslo_concurrency.lockutils [None req-35ac8b08-9435-48e8-930f-9ba89a52858e tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "c1766d8e-7949-4fa8-a762-007d016a4de1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.652s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.053300] env[63538]: DEBUG nova.compute.manager [req-221bef4e-d1fc-483f-a4ad-19dcbc8620d9 req-6e6aa0cd-121f-40b5-9a4f-edd27b988f1b service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Received event network-changed-632e844e-c3bd-4596-8708-86ed6e0abef8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1227.053519] env[63538]: DEBUG nova.compute.manager [req-221bef4e-d1fc-483f-a4ad-19dcbc8620d9 req-6e6aa0cd-121f-40b5-9a4f-edd27b988f1b service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Refreshing instance network info cache due to event network-changed-632e844e-c3bd-4596-8708-86ed6e0abef8. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1227.053741] env[63538]: DEBUG oslo_concurrency.lockutils [req-221bef4e-d1fc-483f-a4ad-19dcbc8620d9 req-6e6aa0cd-121f-40b5-9a4f-edd27b988f1b service nova] Acquiring lock "refresh_cache-c1766d8e-7949-4fa8-a762-007d016a4de1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1227.053894] env[63538]: DEBUG oslo_concurrency.lockutils [req-221bef4e-d1fc-483f-a4ad-19dcbc8620d9 req-6e6aa0cd-121f-40b5-9a4f-edd27b988f1b service nova] Acquired lock "refresh_cache-c1766d8e-7949-4fa8-a762-007d016a4de1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.054075] env[63538]: DEBUG nova.network.neutron [req-221bef4e-d1fc-483f-a4ad-19dcbc8620d9 req-6e6aa0cd-121f-40b5-9a4f-edd27b988f1b service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Refreshing network info cache for port 632e844e-c3bd-4596-8708-86ed6e0abef8 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1227.216387] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101950, 'name': ReconfigVM_Task, 'duration_secs': 0.580336} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.216760] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfigured VM instance instance-00000071 to attach disk [datastore1] f0183c1f-4557-45fd-ba65-4821ef661173/f0183c1f-4557-45fd-ba65-4821ef661173.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1227.216937] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance 'f0183c1f-4557-45fd-ba65-4821ef661173' progress to 50 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1227.724882] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69bd22d2-88eb-47e1-9667-6f74b55268f6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.750273] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c45799-3a82-41d8-83e7-d61c0572f0bf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.773475] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance 'f0183c1f-4557-45fd-ba65-4821ef661173' progress to 67 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1227.817115] env[63538]: DEBUG nova.network.neutron [req-221bef4e-d1fc-483f-a4ad-19dcbc8620d9 req-6e6aa0cd-121f-40b5-9a4f-edd27b988f1b service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Updated VIF entry in instance network info cache for port 632e844e-c3bd-4596-8708-86ed6e0abef8. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1227.817563] env[63538]: DEBUG nova.network.neutron [req-221bef4e-d1fc-483f-a4ad-19dcbc8620d9 req-6e6aa0cd-121f-40b5-9a4f-edd27b988f1b service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Updating instance_info_cache with network_info: [{"id": "632e844e-c3bd-4596-8708-86ed6e0abef8", "address": "fa:16:3e:dc:12:67", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap632e844e-c3", "ovs_interfaceid": "632e844e-c3bd-4596-8708-86ed6e0abef8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.320788] env[63538]: DEBUG oslo_concurrency.lockutils [req-221bef4e-d1fc-483f-a4ad-19dcbc8620d9 req-6e6aa0cd-121f-40b5-9a4f-edd27b988f1b service nova] Releasing lock "refresh_cache-c1766d8e-7949-4fa8-a762-007d016a4de1" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1229.435882] env[63538]: DEBUG nova.network.neutron [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Port 9f09c892-0333-4063-a5da-daa3e2bf19f5 binding to destination host cpu-1 is already ACTIVE {{(pid=63538) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1230.460587] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "f0183c1f-4557-45fd-ba65-4821ef661173-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.460963] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "f0183c1f-4557-45fd-ba65-4821ef661173-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.461049] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 
tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "f0183c1f-4557-45fd-ba65-4821ef661173-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.498286] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1231.498566] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.498677] env[63538]: DEBUG nova.network.neutron [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1232.220220] env[63538]: DEBUG nova.network.neutron [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance_info_cache with network_info: [{"id": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "address": "fa:16:3e:68:a4:9f", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f09c892-03", "ovs_interfaceid": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.723542] env[63538]: DEBUG oslo_concurrency.lockutils [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1233.233727] env[63538]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a62f55e-3dc9-45bb-9c6a-3a5a1ea723bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.241605] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b12de81-328b-4e4f-8fa7-1085b54776f3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.343655] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8065abb9-67da-4374-a7d1-5fa6418ec4e7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.366435] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d803ebf-94b8-486b-911c-cf3a1807b31b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.374713] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance 'f0183c1f-4557-45fd-ba65-4821ef661173' progress to 83 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1234.881282] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1234.881615] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea47da03-229b-42a0-9b0b-0be3f19c5f4e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.889223] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1234.889223] env[63538]: value = "task-5101956" [ 1234.889223] env[63538]: _type = "Task" [ 1234.889223] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.897726] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101956, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.399179] env[63538]: DEBUG oslo_vmware.api [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101956, 'name': PowerOnVM_Task, 'duration_secs': 0.387529} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.399581] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1235.399659] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfc046-6c39-439d-89ad-de7cdb216b85 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance 'f0183c1f-4557-45fd-ba65-4821ef661173' progress to 100 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1237.900025] env[63538]: DEBUG nova.network.neutron [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Port 9f09c892-0333-4063-a5da-daa3e2bf19f5 binding to destination host cpu-1 is already ACTIVE {{(pid=63538) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1237.900025] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.900383] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.900383] env[63538]: DEBUG nova.network.neutron [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1238.687904] env[63538]: DEBUG nova.network.neutron [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance_info_cache with network_info: [{"id": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "address": "fa:16:3e:68:a4:9f", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f09c892-03", "ovs_interfaceid": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.191062] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1239.695053] env[63538]: DEBUG nova.compute.manager [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63538) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1240.794883] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.795192] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.298623] env[63538]: DEBUG nova.objects.instance [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'migration_context' on Instance uuid f0183c1f-4557-45fd-ba65-4821ef661173 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1241.912871] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71cb1fbb-a516-42bf-8389-44a7412debd0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.921534] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d215b122-6874-4698-a6b9-2d22d97730cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.953255] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae85e50-b729-4834-9e63-afbaaf9e7f56 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.961113] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b00239ae-194b-4d26-896b-652c8038cb31 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.975543] env[63538]: DEBUG nova.compute.provider_tree [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1242.478499] env[63538]: DEBUG nova.scheduler.client.report [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1242.767031] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1242.767218] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.269652] env[63538]: DEBUG nova.compute.manager [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1243.490304] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.695s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1243.789846] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1243.790153] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.791689] env[63538]: INFO nova.compute.claims [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1244.914169] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f32fd55-43c1-45e2-a948-b37f73d20ea0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.922100] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08702a4e-2472-45f4-86b4-cd50d8362722 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.951637] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45181d1c-2078-4a70-a81f-4dcd92ff685b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.959662] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-553f1a2d-6061-4cd8-83bc-6067e12ac902 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.973249] env[63538]: DEBUG nova.compute.provider_tree [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1245.023486] env[63538]: INFO nova.compute.manager [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Swapping old allocation on dict_keys(['f65218a4-1d3d-476a-9093-01cae92c8635']) held by migration 
d6e3f498-ebbd-4c3e-8822-e3b13a93f7c8 for instance [ 1245.049102] env[63538]: DEBUG nova.scheduler.client.report [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Overwriting current allocation {'allocations': {'f65218a4-1d3d-476a-9093-01cae92c8635': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 170}}, 'project_id': '1a06b7cc1ab24ba584bbe970e4fc5e81', 'user_id': '5ad1bddeca5346dea39d23339e09db3d', 'consumer_generation': 1} on consumer f0183c1f-4557-45fd-ba65-4821ef661173 {{(pid=63538) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1245.128923] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1245.129171] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.129361] env[63538]: DEBUG nova.network.neutron [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1245.476079] env[63538]: DEBUG nova.scheduler.client.report [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1245.859902] env[63538]: DEBUG nova.network.neutron [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance_info_cache with network_info: [{"id": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "address": "fa:16:3e:68:a4:9f", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f09c892-03", "ovs_interfaceid": "9f09c892-0333-4063-a5da-daa3e2bf19f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.980951] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.981362] env[63538]: DEBUG nova.compute.manager [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1246.363880] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "refresh_cache-f0183c1f-4557-45fd-ba65-4821ef661173" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1246.364152] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb50f1d-66a8-4729-b032-a27412b0154e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.371663] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8eded8-ff8b-4be3-aacb-9afefb9fd136 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.486913] env[63538]: DEBUG nova.compute.utils [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1246.488597] env[63538]: DEBUG nova.compute.manager [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1246.488773] env[63538]: DEBUG nova.network.neutron [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1246.533776] env[63538]: DEBUG nova.policy [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f315670d336b49d6a732297656ce515a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df090f9a727d4cf4a0f466e27928bdc6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1246.797542] env[63538]: DEBUG nova.network.neutron [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Successfully created port: ec3a3da3-8713-4358-bd22-4e497a90c904 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1246.992488] env[63538]: DEBUG nova.compute.manager [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1247.402610] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.402910] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.403168] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.403350] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.403532] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.405860] env[63538]: INFO nova.compute.manager [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Terminating instance [ 1247.408379] env[63538]: DEBUG nova.compute.manager [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1247.408600] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1247.409489] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c636ea0-ed74-4eed-9901-30ef500b4c59 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.417421] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1247.417692] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f80bafe-f638-4bbc-8f27-1e606336b3c6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.425044] env[63538]: DEBUG oslo_vmware.api [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1247.425044] env[63538]: value = "task-5101959" [ 1247.425044] env[63538]: _type = "Task" [ 1247.425044] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.436558] env[63538]: DEBUG oslo_vmware.api [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101959, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.466044] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1247.466044] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-906f9ed6-beaa-4c08-85b7-66bc509e52ae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.473916] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1247.473916] env[63538]: value = "task-5101960" [ 1247.473916] env[63538]: _type = "Task" [ 1247.473916] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.483310] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101960, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.497703] env[63538]: INFO nova.virt.block_device [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Booting with volume f4f9e20f-58de-40de-b281-a78c4a4444b2 at /dev/sda [ 1247.542869] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c7e0e46-947e-474c-82b2-cabd2b98cd9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.556097] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1cdcbd-b559-4b7b-8218-d816cfc4e332 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.598949] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-044e8bda-ff6c-45ad-993d-d5f132a12d70 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.608957] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9208ed-6866-4c8c-9984-3b3e10f127f3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.645071] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bd3c86-9734-48bc-909d-12ff0bceccc2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.653060] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce0a794-b738-40dd-b552-6e7004eed4d6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.668696] env[63538]: DEBUG nova.virt.block_device [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updating existing volume attachment record: 6c566560-055e-4c55-9009-c09b3f55f604 {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1247.936164] env[63538]: DEBUG oslo_vmware.api [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101959, 'name': PowerOffVM_Task, 'duration_secs': 0.264525} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.936460] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1247.936642] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1247.936920] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2838525-7549-4570-9e12-a106d8bdf472 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.983622] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101960, 'name': PowerOffVM_Task, 'duration_secs': 0.21716} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.983867] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1247.984580] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1247.984813] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1247.984980] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1247.985206] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 
tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1247.985403] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1247.985572] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1247.985793] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1247.986082] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1247.986237] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1247.986414] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1247.986610] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1247.991632] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f9f4b78-352f-4081-a350-62dc51fac919 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.003681] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1248.004087] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Deleting contents of the VM from 
datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1248.004087] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleting the datastore file [datastore1] d00151c1-ca34-4c57-9ed2-74d506a0cffb {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1248.004349] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-90f0013c-ad16-4381-b64b-6fe84fe0291e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.009934] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1248.009934] env[63538]: value = "task-5101962" [ 1248.009934] env[63538]: _type = "Task" [ 1248.009934] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.012345] env[63538]: DEBUG oslo_vmware.api [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for the task: (returnval){ [ 1248.012345] env[63538]: value = "task-5101963" [ 1248.012345] env[63538]: _type = "Task" [ 1248.012345] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.027055] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101962, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.027345] env[63538]: DEBUG oslo_vmware.api [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101963, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.272998] env[63538]: DEBUG nova.compute.manager [req-fca5744c-748a-40bb-a90c-5f6ffb7e7714 req-92cf3310-930f-4a35-b0c9-e8870298d3fe service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Received event network-vif-plugged-ec3a3da3-8713-4358-bd22-4e497a90c904 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1248.273285] env[63538]: DEBUG oslo_concurrency.lockutils [req-fca5744c-748a-40bb-a90c-5f6ffb7e7714 req-92cf3310-930f-4a35-b0c9-e8870298d3fe service nova] Acquiring lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.273513] env[63538]: DEBUG oslo_concurrency.lockutils [req-fca5744c-748a-40bb-a90c-5f6ffb7e7714 req-92cf3310-930f-4a35-b0c9-e8870298d3fe service nova] Lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.273696] env[63538]: DEBUG oslo_concurrency.lockutils [req-fca5744c-748a-40bb-a90c-5f6ffb7e7714 req-92cf3310-930f-4a35-b0c9-e8870298d3fe service nova] Lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.273876] env[63538]: DEBUG nova.compute.manager [req-fca5744c-748a-40bb-a90c-5f6ffb7e7714 req-92cf3310-930f-4a35-b0c9-e8870298d3fe service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] No waiting events found dispatching network-vif-plugged-ec3a3da3-8713-4358-bd22-4e497a90c904 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1248.274063] env[63538]: WARNING nova.compute.manager [req-fca5744c-748a-40bb-a90c-5f6ffb7e7714 req-92cf3310-930f-4a35-b0c9-e8870298d3fe service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Received unexpected event network-vif-plugged-ec3a3da3-8713-4358-bd22-4e497a90c904 for instance with vm_state building and task_state block_device_mapping. [ 1248.356595] env[63538]: DEBUG nova.network.neutron [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Successfully updated port: ec3a3da3-8713-4358-bd22-4e497a90c904 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1248.530301] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101962, 'name': ReconfigVM_Task, 'duration_secs': 0.172905} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.530600] env[63538]: DEBUG oslo_vmware.api [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Task: {'id': task-5101963, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142242} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.530976] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1248.531142] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1248.531312] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1248.531494] env[63538]: INFO nova.compute.manager [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1248.531752] env[63538]: DEBUG oslo.service.loopingcall [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1248.532504] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d8a24a-e0cc-43b2-b57e-e5600436b9ee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.535455] env[63538]: DEBUG nova.compute.manager [-] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1248.535815] env[63538]: DEBUG nova.network.neutron [-] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1248.557169] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1248.557478] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1248.557639] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1248.557976] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1248.557976] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1248.558459] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1248.558459] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1248.558566] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1248.558719] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1248.558893] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1248.559097] env[63538]: DEBUG nova.virt.hardware [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1248.560076] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7b7b41b-7db7-46d3-981a-ab77f3eac58a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.566722] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1248.566722] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bc4bd0-537c-cdf1-c090-cac46a014088" [ 1248.566722] env[63538]: _type = "Task" [ 1248.566722] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.577024] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52bc4bd0-537c-cdf1-c090-cac46a014088, 'name': SearchDatastore_Task, 'duration_secs': 0.008064} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.582744] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1248.582744] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5078bda8-47b3-47d5-a694-0b230a34b4df {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.602342] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1248.602342] env[63538]: value = "task-5101964" [ 1248.602342] env[63538]: _type = "Task" [ 1248.602342] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.611354] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101964, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.862032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "refresh_cache-afa669ca-26b3-4b9d-ac9d-abbc966d5798" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1248.862032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "refresh_cache-afa669ca-26b3-4b9d-ac9d-abbc966d5798" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.862032] env[63538]: DEBUG nova.network.neutron [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1249.113816] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101964, 'name': ReconfigVM_Task, 'duration_secs': 0.457237} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.114189] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1249.115064] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fdf4437-7987-447e-96bf-cf6d3c5675e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.143025] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] f0183c1f-4557-45fd-ba65-4821ef661173/f0183c1f-4557-45fd-ba65-4821ef661173.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1249.144165] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ba0f8df-295a-4d5f-8cb4-8ddbb105aec1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.168935] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1249.168935] env[63538]: value = "task-5101965" [ 1249.168935] env[63538]: _type = "Task" [ 1249.168935] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.181948] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101965, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.411456] env[63538]: DEBUG nova.network.neutron [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1249.586216] env[63538]: DEBUG nova.network.neutron [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updating instance_info_cache with network_info: [{"id": "ec3a3da3-8713-4358-bd22-4e497a90c904", "address": "fa:16:3e:76:73:41", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec3a3da3-87", "ovs_interfaceid": "ec3a3da3-8713-4358-bd22-4e497a90c904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.680191] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101965, 'name': ReconfigVM_Task, 'duration_secs': 0.347359} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.680191] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfigured VM instance instance-00000071 to attach disk [datastore1] f0183c1f-4557-45fd-ba65-4821ef661173/f0183c1f-4557-45fd-ba65-4821ef661173.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1249.681054] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3c1cdc-e76d-47f4-8d3f-58e2577a162d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.705057] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71709ace-f3e1-41f9-943a-c07ba47c0635 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.727108] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ba6f0e-48ff-4a3e-b558-d1741443f22b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.750201] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681a6344-adee-47a2-a5ca-cb03064849cd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.758672] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1249.758955] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45bd9bf8-1701-4c7a-828c-b066d55f3aed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.766564] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1249.766564] env[63538]: value = "task-5101966" [ 1249.766564] env[63538]: _type = "Task" [ 1249.766564] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.776878] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101966, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.785437] env[63538]: DEBUG nova.compute.manager [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1249.786048] env[63538]: DEBUG nova.virt.hardware [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1249.786290] env[63538]: DEBUG nova.virt.hardware [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1249.786561] env[63538]: DEBUG nova.virt.hardware [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1249.786652] env[63538]: DEBUG nova.virt.hardware [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1249.786791] env[63538]: DEBUG nova.virt.hardware [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1249.786945] env[63538]: DEBUG nova.virt.hardware [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1249.787175] env[63538]: DEBUG nova.virt.hardware [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1249.787343] env[63538]: DEBUG nova.virt.hardware [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1249.787612] env[63538]: DEBUG nova.virt.hardware [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Got 1 possible topologies 
{{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1249.788123] env[63538]: DEBUG nova.virt.hardware [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1249.788123] env[63538]: DEBUG nova.virt.hardware [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1249.789276] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b866c0-5d7f-4254-b70c-331743f0911f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.798831] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a8e3be-0d36-43ce-ad44-381c0e816693 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.922138] env[63538]: DEBUG nova.network.neutron [-] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.089687] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "refresh_cache-afa669ca-26b3-4b9d-ac9d-abbc966d5798" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1250.089881] env[63538]: DEBUG nova.compute.manager [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Instance network_info: |[{"id": "ec3a3da3-8713-4358-bd22-4e497a90c904", "address": "fa:16:3e:76:73:41", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec3a3da3-87", "ovs_interfaceid": "ec3a3da3-8713-4358-bd22-4e497a90c904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1250.090321] 
env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:73:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec3a3da3-8713-4358-bd22-4e497a90c904', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1250.099726] env[63538]: DEBUG oslo.service.loopingcall [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1250.099726] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1250.099965] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b903627b-292c-4fb2-9443-52630389552d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.123265] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1250.123265] env[63538]: value = "task-5101967" [ 1250.123265] env[63538]: _type = "Task" [ 1250.123265] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.137035] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101967, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.278482] env[63538]: DEBUG oslo_vmware.api [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101966, 'name': PowerOnVM_Task, 'duration_secs': 0.474106} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.278829] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1250.311917] env[63538]: DEBUG nova.compute.manager [req-806ccd05-012d-4dd8-a29e-06f4252cb75f req-ce09e025-bc3c-4094-9e00-43cf42b42527 service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Received event network-changed-ec3a3da3-8713-4358-bd22-4e497a90c904 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1250.312195] env[63538]: DEBUG nova.compute.manager [req-806ccd05-012d-4dd8-a29e-06f4252cb75f req-ce09e025-bc3c-4094-9e00-43cf42b42527 service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Refreshing instance network info cache due to event network-changed-ec3a3da3-8713-4358-bd22-4e497a90c904. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1250.312520] env[63538]: DEBUG oslo_concurrency.lockutils [req-806ccd05-012d-4dd8-a29e-06f4252cb75f req-ce09e025-bc3c-4094-9e00-43cf42b42527 service nova] Acquiring lock "refresh_cache-afa669ca-26b3-4b9d-ac9d-abbc966d5798" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1250.312740] env[63538]: DEBUG oslo_concurrency.lockutils [req-806ccd05-012d-4dd8-a29e-06f4252cb75f req-ce09e025-bc3c-4094-9e00-43cf42b42527 service nova] Acquired lock "refresh_cache-afa669ca-26b3-4b9d-ac9d-abbc966d5798" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.312984] env[63538]: DEBUG nova.network.neutron [req-806ccd05-012d-4dd8-a29e-06f4252cb75f req-ce09e025-bc3c-4094-9e00-43cf42b42527 service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Refreshing network info cache for port ec3a3da3-8713-4358-bd22-4e497a90c904 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1250.425318] env[63538]: INFO nova.compute.manager [-] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Took 1.89 seconds to deallocate network for instance. [ 1250.633305] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101967, 'name': CreateVM_Task, 'duration_secs': 0.447919} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.633518] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1250.634144] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992559', 'volume_id': 'f4f9e20f-58de-40de-b281-a78c4a4444b2', 'name': 'volume-f4f9e20f-58de-40de-b281-a78c4a4444b2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'afa669ca-26b3-4b9d-ac9d-abbc966d5798', 'attached_at': '', 'detached_at': '', 'volume_id': 'f4f9e20f-58de-40de-b281-a78c4a4444b2', 'serial': 'f4f9e20f-58de-40de-b281-a78c4a4444b2'}, 'delete_on_termination': True, 'attachment_id': '6c566560-055e-4c55-9009-c09b3f55f604', 'guest_format': None, 'mount_device': '/dev/sda', 'device_type': None, 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=63538) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1250.634370] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Root volume attach. 
Driver type: vmdk {{(pid=63538) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1250.635148] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17356a96-ffd1-400d-9a62-95fdd2e55524 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.643102] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2635177b-df81-4a39-9890-a0225f37e83d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.649436] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbf8ccd-4e68-4930-80e7-a01f53c445d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.655749] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-330fcb96-72b5-419e-b4fe-b5236c24a655 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.663170] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1250.663170] env[63538]: value = "task-5101968" [ 1250.663170] env[63538]: _type = "Task" [ 1250.663170] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.677736] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101968, 'name': RelocateVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.934292] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.934499] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.934748] env[63538]: DEBUG nova.objects.instance [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lazy-loading 'resources' on Instance uuid d00151c1-ca34-4c57-9ed2-74d506a0cffb {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1251.049844] env[63538]: DEBUG nova.network.neutron [req-806ccd05-012d-4dd8-a29e-06f4252cb75f req-ce09e025-bc3c-4094-9e00-43cf42b42527 service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updated VIF entry in instance network info cache for port ec3a3da3-8713-4358-bd22-4e497a90c904. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1251.050243] env[63538]: DEBUG nova.network.neutron [req-806ccd05-012d-4dd8-a29e-06f4252cb75f req-ce09e025-bc3c-4094-9e00-43cf42b42527 service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updating instance_info_cache with network_info: [{"id": "ec3a3da3-8713-4358-bd22-4e497a90c904", "address": "fa:16:3e:76:73:41", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec3a3da3-87", "ovs_interfaceid": "ec3a3da3-8713-4358-bd22-4e497a90c904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.172961] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101968, 'name': RelocateVM_Task, 'duration_secs': 0.401554} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.173347] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Volume attach. 
Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1251.173423] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992559', 'volume_id': 'f4f9e20f-58de-40de-b281-a78c4a4444b2', 'name': 'volume-f4f9e20f-58de-40de-b281-a78c4a4444b2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'afa669ca-26b3-4b9d-ac9d-abbc966d5798', 'attached_at': '', 'detached_at': '', 'volume_id': 'f4f9e20f-58de-40de-b281-a78c4a4444b2', 'serial': 'f4f9e20f-58de-40de-b281-a78c4a4444b2'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1251.174228] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88692580-064b-4182-8d80-01ffacf44477 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.190277] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320bb2da-abcf-4b00-90a5-552aa8ceefb5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.214428] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] volume-f4f9e20f-58de-40de-b281-a78c4a4444b2/volume-f4f9e20f-58de-40de-b281-a78c4a4444b2.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1251.214773] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e63f6d48-9579-4d28-aaf1-db29682d1478 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.235660] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1251.235660] env[63538]: value = "task-5101969" [ 1251.235660] env[63538]: _type = "Task" [ 1251.235660] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.244344] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101969, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.329683] env[63538]: INFO nova.compute.manager [None req-a10b17a8-6dd8-4750-9316-481e14ec50bb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance to original state: 'active' [ 1251.553378] env[63538]: DEBUG oslo_concurrency.lockutils [req-806ccd05-012d-4dd8-a29e-06f4252cb75f req-ce09e025-bc3c-4094-9e00-43cf42b42527 service nova] Releasing lock "refresh_cache-afa669ca-26b3-4b9d-ac9d-abbc966d5798" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1251.553588] env[63538]: DEBUG nova.compute.manager [req-806ccd05-012d-4dd8-a29e-06f4252cb75f req-ce09e025-bc3c-4094-9e00-43cf42b42527 service nova] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Received event network-vif-deleted-319cec1c-49eb-43a4-a9ec-6b74a507b6d6 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1251.555429] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6c4af6-f4b2-4e9b-aed7-ad2ab7ecf2ac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.563407] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6701542-6a90-4645-97b9-961767ce8498 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.593932] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1888d5ec-8c6d-468b-83fb-0a603ff4beae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.602232] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc83fc19-608d-452f-844b-c7f5709b935b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.616822] env[63538]: DEBUG nova.compute.provider_tree [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1251.746316] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101969, 'name': ReconfigVM_Task, 'duration_secs': 0.263789} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.746538] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Reconfigured VM instance instance-00000079 to attach disk [datastore2] volume-f4f9e20f-58de-40de-b281-a78c4a4444b2/volume-f4f9e20f-58de-40de-b281-a78c4a4444b2.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1251.751338] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6cc86821-9c59-4d9f-9689-9b6b95740f47 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.767781] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1251.767781] env[63538]: value = "task-5101970" [ 1251.767781] env[63538]: _type = "Task" [ 1251.767781] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.777346] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101970, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.120328] env[63538]: DEBUG nova.scheduler.client.report [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1252.278968] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101970, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.627241] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.692s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.672279] env[63538]: INFO nova.scheduler.client.report [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Deleted allocations for instance d00151c1-ca34-4c57-9ed2-74d506a0cffb [ 1252.780923] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101970, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.181373] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8fd5f229-bd90-4f58-b0e1-6e5c73133f33 tempest-AttachVolumeShelveTestJSON-1945709859 tempest-AttachVolumeShelveTestJSON-1945709859-project-member] Lock "d00151c1-ca34-4c57-9ed2-74d506a0cffb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.778s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1253.225907] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "f0183c1f-4557-45fd-ba65-4821ef661173" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1253.226205] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "f0183c1f-4557-45fd-ba65-4821ef661173" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.227102] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "f0183c1f-4557-45fd-ba65-4821ef661173-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1253.227229] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "f0183c1f-4557-45fd-ba65-4821ef661173-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.227458] env[63538]: DEBUG 
oslo_concurrency.lockutils [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "f0183c1f-4557-45fd-ba65-4821ef661173-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1253.229588] env[63538]: INFO nova.compute.manager [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Terminating instance [ 1253.231824] env[63538]: DEBUG nova.compute.manager [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1253.232104] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1253.232377] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a389c870-8ade-42b2-ad9d-55f3497929ca {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.241149] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1253.241149] env[63538]: value = "task-5101971" [ 1253.241149] env[63538]: _type = "Task" [ 1253.241149] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.250940] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101971, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.279400] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101970, 'name': ReconfigVM_Task, 'duration_secs': 1.130201} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.279846] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992559', 'volume_id': 'f4f9e20f-58de-40de-b281-a78c4a4444b2', 'name': 'volume-f4f9e20f-58de-40de-b281-a78c4a4444b2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'afa669ca-26b3-4b9d-ac9d-abbc966d5798', 'attached_at': '', 'detached_at': '', 'volume_id': 'f4f9e20f-58de-40de-b281-a78c4a4444b2', 'serial': 'f4f9e20f-58de-40de-b281-a78c4a4444b2'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1253.280400] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2730cf96-8df5-44cd-95df-ddf96916a032 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.287667] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1253.287667] env[63538]: value = "task-5101972" [ 1253.287667] env[63538]: _type = "Task" [ 1253.287667] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.297604] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101972, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.751466] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101971, 'name': PowerOffVM_Task, 'duration_secs': 0.203246} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.751736] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1253.751939] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Volume detach. 
Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1253.752156] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992554', 'volume_id': '8dce72fb-6084-4a8a-8e72-83746e434be6', 'name': 'volume-8dce72fb-6084-4a8a-8e72-83746e434be6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'f0183c1f-4557-45fd-ba65-4821ef661173', 'attached_at': '2025-12-12T13:01:54.000000', 'detached_at': '', 'volume_id': '8dce72fb-6084-4a8a-8e72-83746e434be6', 'serial': '8dce72fb-6084-4a8a-8e72-83746e434be6'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1253.754061] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1272378-7895-4fc6-b023-da2176c20e22 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.776199] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0576d0c6-2831-45d1-847c-7c4ac6b329a2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.785493] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb09ed56-67c0-40c9-8b34-d021709fee97 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.796131] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101972, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.812078] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f8fb80-bc10-4914-97a9-067461aafd80 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.827569] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] The volume has not been displaced from its original location: [datastore1] volume-8dce72fb-6084-4a8a-8e72-83746e434be6/volume-8dce72fb-6084-4a8a-8e72-83746e434be6.vmdk. No consolidation needed. 
{{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1253.833642] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1253.833967] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3792070b-20ba-431e-a072-8f9ff23bbc30 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.852952] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1253.852952] env[63538]: value = "task-5101973" [ 1253.852952] env[63538]: _type = "Task" [ 1253.852952] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.862485] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101973, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.297767] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101972, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.363465] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101973, 'name': ReconfigVM_Task, 'duration_secs': 0.243725} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.363760] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1254.368729] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-119bbaaa-ec18-4e97-a169-754f1d98642f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.387019] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1254.387019] env[63538]: value = "task-5101974" [ 1254.387019] env[63538]: _type = "Task" [ 1254.387019] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.395061] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101974, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.798648] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101972, 'name': Rename_Task, 'duration_secs': 1.164301} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.798973] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1254.799252] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ab611af-3bc1-42fb-8953-c40cf831ea32 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.806952] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1254.806952] env[63538]: value = "task-5101975" [ 1254.806952] env[63538]: _type = "Task" [ 1254.806952] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.817012] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101975, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.825837] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1254.826149] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1254.885870] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ee3b0f4-77c0-4d61-a9c4-46521a6bc3b8 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "057f192d-b470-4683-b197-913457d10717" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.886150] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ee3b0f4-77c0-4d61-a9c4-46521a6bc3b8 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "057f192d-b470-4683-b197-913457d10717" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.886345] env[63538]: DEBUG nova.compute.manager [None req-1ee3b0f4-77c0-4d61-a9c4-46521a6bc3b8 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1254.887285] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edff70fe-4ce7-4d99-9e12-f339805b7ec0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.898810] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101974, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.900393] env[63538]: DEBUG nova.compute.manager [None req-1ee3b0f4-77c0-4d61-a9c4-46521a6bc3b8 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63538) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1254.900952] env[63538]: DEBUG nova.objects.instance [None req-1ee3b0f4-77c0-4d61-a9c4-46521a6bc3b8 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lazy-loading 'flavor' on Instance uuid 057f192d-b470-4683-b197-913457d10717 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1255.318683] env[63538]: DEBUG oslo_vmware.api [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101975, 'name': PowerOnVM_Task, 'duration_secs': 0.474297} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.319105] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1255.319169] env[63538]: INFO nova.compute.manager [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Took 5.53 seconds to spawn the instance on the hypervisor. [ 1255.319342] env[63538]: DEBUG nova.compute.manager [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1255.320406] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b941ce8-3ebe-4864-a96f-1eb9980fffbf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.333071] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.333071] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Starting heal instance info cache {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 1255.395934] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101974, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.406247] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ee3b0f4-77c0-4d61-a9c4-46521a6bc3b8 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1255.406510] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1d113e6-e757-40c0-90a7-81bc9b5dfa84 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.417248] env[63538]: DEBUG oslo_vmware.api [None req-1ee3b0f4-77c0-4d61-a9c4-46521a6bc3b8 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1255.417248] env[63538]: value = "task-5101977" [ 1255.417248] env[63538]: _type = "Task" [ 1255.417248] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.426919] env[63538]: DEBUG oslo_vmware.api [None req-1ee3b0f4-77c0-4d61-a9c4-46521a6bc3b8 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101977, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.842282] env[63538]: INFO nova.compute.manager [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Took 12.07 seconds to build instance. [ 1255.899872] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101974, 'name': ReconfigVM_Task, 'duration_secs': 1.145948} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.900385] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992554', 'volume_id': '8dce72fb-6084-4a8a-8e72-83746e434be6', 'name': 'volume-8dce72fb-6084-4a8a-8e72-83746e434be6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'f0183c1f-4557-45fd-ba65-4821ef661173', 'attached_at': '2025-12-12T13:01:54.000000', 'detached_at': '', 'volume_id': '8dce72fb-6084-4a8a-8e72-83746e434be6', 'serial': '8dce72fb-6084-4a8a-8e72-83746e434be6'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1255.900851] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1255.901868] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a23023-b9a2-4bca-b98f-dd949e74bd5c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.909839] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1255.910144] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5f4de1a-c1aa-47f1-aebd-19ecb46c59cb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.931672] env[63538]: DEBUG oslo_vmware.api [None req-1ee3b0f4-77c0-4d61-a9c4-46521a6bc3b8 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101977, 'name': PowerOffVM_Task, 'duration_secs': 0.390997} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.932294] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ee3b0f4-77c0-4d61-a9c4-46521a6bc3b8 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1255.932611] env[63538]: DEBUG nova.compute.manager [None req-1ee3b0f4-77c0-4d61-a9c4-46521a6bc3b8 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1255.933789] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6bfb84-d893-4f1f-a6ac-1f46b9c812a5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.985284] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1255.985735] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1255.986034] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleting the datastore file [datastore1] f0183c1f-4557-45fd-ba65-4821ef661173 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1255.986387] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5f854ca-394f-496e-a267-b956a93c391a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.996990] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1255.996990] env[63538]: value = "task-5101979" [ 1255.996990] env[63538]: _type = "Task" [ 1255.996990] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.007806] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101979, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.342058] env[63538]: DEBUG oslo_concurrency.lockutils [None req-eb1e93eb-0ef3-489d-a69b-a89dc6c7a026 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.575s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.369597] env[63538]: DEBUG nova.compute.manager [req-03f4f02c-1c92-4890-83d1-37f0d7dd7380 req-8c2ec03f-cf30-480a-a4fa-bd8d5fc53945 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Received event network-changed-5250918c-5112-49ad-b1d3-f73c2d534637 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1256.369855] env[63538]: DEBUG nova.compute.manager [req-03f4f02c-1c92-4890-83d1-37f0d7dd7380 req-8c2ec03f-cf30-480a-a4fa-bd8d5fc53945 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Refreshing instance network info cache due to event network-changed-5250918c-5112-49ad-b1d3-f73c2d534637. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1256.370090] env[63538]: DEBUG oslo_concurrency.lockutils [req-03f4f02c-1c92-4890-83d1-37f0d7dd7380 req-8c2ec03f-cf30-480a-a4fa-bd8d5fc53945 service nova] Acquiring lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.370247] env[63538]: DEBUG oslo_concurrency.lockutils [req-03f4f02c-1c92-4890-83d1-37f0d7dd7380 req-8c2ec03f-cf30-480a-a4fa-bd8d5fc53945 service nova] Acquired lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.370413] env[63538]: DEBUG nova.network.neutron [req-03f4f02c-1c92-4890-83d1-37f0d7dd7380 req-8c2ec03f-cf30-480a-a4fa-bd8d5fc53945 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Refreshing network info cache for port 5250918c-5112-49ad-b1d3-f73c2d534637 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1256.448949] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1ee3b0f4-77c0-4d61-a9c4-46521a6bc3b8 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "057f192d-b470-4683-b197-913457d10717" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.563s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.510418] env[63538]: DEBUG oslo_vmware.api [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5101979, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175063} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.510878] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1256.511200] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1256.511818] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1256.511818] env[63538]: INFO nova.compute.manager [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Took 3.28 seconds to destroy the instance on the hypervisor. [ 1256.512153] env[63538]: DEBUG oslo.service.loopingcall [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1256.512472] env[63538]: DEBUG nova.compute.manager [-] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1256.512902] env[63538]: DEBUG nova.network.neutron [-] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1257.090201] env[63538]: DEBUG nova.network.neutron [req-03f4f02c-1c92-4890-83d1-37f0d7dd7380 req-8c2ec03f-cf30-480a-a4fa-bd8d5fc53945 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Updated VIF entry in instance network info cache for port 5250918c-5112-49ad-b1d3-f73c2d534637. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1257.090581] env[63538]: DEBUG nova.network.neutron [req-03f4f02c-1c92-4890-83d1-37f0d7dd7380 req-8c2ec03f-cf30-480a-a4fa-bd8d5fc53945 service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Updating instance_info_cache with network_info: [{"id": "5250918c-5112-49ad-b1d3-f73c2d534637", "address": "fa:16:3e:6d:44:6f", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5250918c-51", "ovs_interfaceid": "5250918c-5112-49ad-b1d3-f73c2d534637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.370525] env[63538]: DEBUG nova.objects.instance [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lazy-loading 'flavor' on Instance uuid 057f192d-b470-4683-b197-913457d10717 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1257.382020] env[63538]: DEBUG nova.compute.manager [req-9913fd9b-6a6b-4510-82de-cffdd9f763d7 req-19f9bf9e-142e-4b88-a9f8-f2d5eea4d7b5 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Received event network-vif-deleted-9f09c892-0333-4063-a5da-daa3e2bf19f5 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1257.382262] env[63538]: INFO nova.compute.manager [req-9913fd9b-6a6b-4510-82de-cffdd9f763d7 req-19f9bf9e-142e-4b88-a9f8-f2d5eea4d7b5 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Neutron deleted interface 9f09c892-0333-4063-a5da-daa3e2bf19f5; detaching it from the instance and deleting it from the info cache [ 1257.382415] env[63538]: DEBUG nova.network.neutron [req-9913fd9b-6a6b-4510-82de-cffdd9f763d7 req-19f9bf9e-142e-4b88-a9f8-f2d5eea4d7b5 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.593432] env[63538]: DEBUG oslo_concurrency.lockutils [req-03f4f02c-1c92-4890-83d1-37f0d7dd7380 req-8c2ec03f-cf30-480a-a4fa-bd8d5fc53945 service nova] Releasing lock "refresh_cache-fb26fb32-a420-4667-850c-e32786edd8f2" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1257.845877] env[63538]: DEBUG nova.network.neutron [-] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1257.854260] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Didn't find any instances for network info cache update. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10103}} [ 1257.854526] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.855379] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.855662] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.855871] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.856059] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.856263] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.856417] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63538) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10636}} [ 1257.856907] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.876318] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.876559] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.876791] env[63538]: DEBUG nova.network.neutron [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1257.877019] env[63538]: DEBUG nova.objects.instance [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lazy-loading 'info_cache' on Instance uuid 057f192d-b470-4683-b197-913457d10717 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1257.885494] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17bc969b-d184-4d4b-b386-b5ba80d0ad8e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.897364] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32fa0da-572b-4547-aaf9-87bd1ffc0708 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.929168] env[63538]: DEBUG nova.compute.manager [req-9913fd9b-6a6b-4510-82de-cffdd9f763d7 req-19f9bf9e-142e-4b88-a9f8-f2d5eea4d7b5 service nova] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Detach interface failed, port_id=9f09c892-0333-4063-a5da-daa3e2bf19f5, reason: Instance f0183c1f-4557-45fd-ba65-4821ef661173 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1257.932366] env[63538]: DEBUG nova.compute.manager [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Stashing vm_state: active {{(pid=63538) _prep_resize /opt/stack/nova/nova/compute/manager.py:5670}} [ 1258.349853] env[63538]: INFO nova.compute.manager [-] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Took 1.84 seconds to deallocate network for instance. 
[ 1258.359451] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.359687] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.359861] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.360026] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63538) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1258.360940] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f036df56-0c7b-43d8-9fae-9a635b81c476 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.370021] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a22df5-1bb4-4d75-94c3-bea502a8ea7b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.385288] env[63538]: DEBUG nova.objects.base [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Object Instance<057f192d-b470-4683-b197-913457d10717> lazy-loaded attributes: flavor,info_cache {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1258.387229] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d006830e-6fd3-4eec-a2c1-4eacfc61de65 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.395094] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e1672f-1ecb-41c0-b47b-f08873803af2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.402965] env[63538]: DEBUG nova.compute.manager [req-0a1cc253-8fd6-4e33-b555-e06ffb7a71f3 req-0dd061b2-1aee-477c-b26d-6e0d28edc6bf service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Received event network-changed-ec3a3da3-8713-4358-bd22-4e497a90c904 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1258.402965] env[63538]: DEBUG nova.compute.manager [req-0a1cc253-8fd6-4e33-b555-e06ffb7a71f3 req-0dd061b2-1aee-477c-b26d-6e0d28edc6bf service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Refreshing instance network info cache due to event network-changed-ec3a3da3-8713-4358-bd22-4e497a90c904. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1258.403206] env[63538]: DEBUG oslo_concurrency.lockutils [req-0a1cc253-8fd6-4e33-b555-e06ffb7a71f3 req-0dd061b2-1aee-477c-b26d-6e0d28edc6bf service nova] Acquiring lock "refresh_cache-afa669ca-26b3-4b9d-ac9d-abbc966d5798" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1258.403291] env[63538]: DEBUG oslo_concurrency.lockutils [req-0a1cc253-8fd6-4e33-b555-e06ffb7a71f3 req-0dd061b2-1aee-477c-b26d-6e0d28edc6bf service nova] Acquired lock "refresh_cache-afa669ca-26b3-4b9d-ac9d-abbc966d5798" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.403454] env[63538]: DEBUG nova.network.neutron [req-0a1cc253-8fd6-4e33-b555-e06ffb7a71f3 req-0dd061b2-1aee-477c-b26d-6e0d28edc6bf service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Refreshing network info cache for port ec3a3da3-8713-4358-bd22-4e497a90c904 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1258.430748] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179556MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=63538) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1258.430877] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.431030] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.459510] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.778334] env[63538]: DEBUG nova.network.neutron [req-0a1cc253-8fd6-4e33-b555-e06ffb7a71f3 req-0dd061b2-1aee-477c-b26d-6e0d28edc6bf service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updated VIF entry in instance network info cache for port ec3a3da3-8713-4358-bd22-4e497a90c904. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1258.778831] env[63538]: DEBUG nova.network.neutron [req-0a1cc253-8fd6-4e33-b555-e06ffb7a71f3 req-0dd061b2-1aee-477c-b26d-6e0d28edc6bf service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updating instance_info_cache with network_info: [{"id": "ec3a3da3-8713-4358-bd22-4e497a90c904", "address": "fa:16:3e:76:73:41", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec3a3da3-87", "ovs_interfaceid": "ec3a3da3-8713-4358-bd22-4e497a90c904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.901023] env[63538]: INFO nova.compute.manager [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Took 0.55 seconds to detach 1 volumes for instance. 
[ 1259.281543] env[63538]: DEBUG oslo_concurrency.lockutils [req-0a1cc253-8fd6-4e33-b555-e06ffb7a71f3 req-0dd061b2-1aee-477c-b26d-6e0d28edc6bf service nova] Releasing lock "refresh_cache-afa669ca-26b3-4b9d-ac9d-abbc966d5798" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1259.318598] env[63538]: DEBUG nova.network.neutron [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Updating instance_info_cache with network_info: [{"id": "3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1", "address": "fa:16:3e:73:08:09", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cef0125-1c", "ovs_interfaceid": "3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.407743] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.443588] env[63538]: INFO nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updating resource usage from migration 9ec5125d-05d3-4a74-ada6-46cf08ebfece [ 1259.467708] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance fb26fb32-a420-4667-850c-e32786edd8f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1259.468691] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance e3feec17-ca1b-4873-bb0a-370c3868aabf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1259.468691] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 057f192d-b470-4683-b197-913457d10717 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1259.468691] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance e93aab2e-f8c4-4959-923f-0449a84108d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1259.468929] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance c1766d8e-7949-4fa8-a762-007d016a4de1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1259.468929] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance f0183c1f-4557-45fd-ba65-4821ef661173 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1259.469086] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Migration 9ec5125d-05d3-4a74-ada6-46cf08ebfece is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1715}} [ 1259.469211] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance afa669ca-26b3-4b9d-ac9d-abbc966d5798 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1259.469490] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1259.469703] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=100GB used_disk=6GB total_vcpus=48 used_vcpus=7 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '7', 'num_vm_active': '6', 'num_task_None': '4', 'num_os_type_None': '7', 'num_proj_df090f9a727d4cf4a0f466e27928bdc6': '3', 'io_workload': '1', 'num_proj_9b1eba931f144b94b6e186dac1310dfa': '2', 'num_task_deleting': '1', 'num_proj_1a06b7cc1ab24ba584bbe970e4fc5e81': '1', 'num_vm_stopped': '1', 'num_task_powering-on': '1', 'num_proj_0d6954a5254f441ca256c85330297cef': '1', 'num_task_resize_prep': '1'} {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1259.579198] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40f2243-28b4-49c5-a549-2473200b08d1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.587342] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b206072-cddc-47d8-bc40-04eeba5796a5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.619382] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77c223a-30c8-4772-97c8-c5771845e96f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.626427] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4eccb2-ede2-43ab-85f9-ec829e10864c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.643571] env[63538]: DEBUG nova.compute.provider_tree [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1259.821959] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1260.146400] env[63538]: DEBUG nova.scheduler.client.report [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1260.325413] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1260.325875] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79ed3c2e-ba90-4831-b8e1-a307a9d40636 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.333579] env[63538]: DEBUG oslo_vmware.api [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1260.333579] env[63538]: value = "task-5101980" [ 1260.333579] env[63538]: _type = "Task" [ 1260.333579] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.342840] env[63538]: DEBUG oslo_vmware.api [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101980, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.651569] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63538) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1260.651995] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.221s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1260.652203] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.193s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.843978] env[63538]: DEBUG oslo_vmware.api [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101980, 'name': PowerOnVM_Task, 'duration_secs': 0.44496} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.844317] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1260.844518] env[63538]: DEBUG nova.compute.manager [None req-c2c2dcb5-d21a-4f36-9613-372a531c9665 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1260.845328] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c1e3a6-2f90-4824-80aa-eb38a8ceca29 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.157101] env[63538]: INFO nova.compute.claims [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1261.663617] env[63538]: INFO nova.compute.resource_tracker [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updating resource usage from migration 9ec5125d-05d3-4a74-ada6-46cf08ebfece [ 1261.776762] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517f8801-58c7-4c50-9090-520697f2ea20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.788575] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a6b754-36af-419b-bbdf-c222c7927d74 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.822736] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75bfcbbf-6b1a-4160-b169-0fe99183173f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.831312] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88192c6c-7453-44bb-9541-f46bd88e0f53 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.845428] env[63538]: DEBUG nova.compute.provider_tree [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1262.349115] env[63538]: DEBUG nova.scheduler.client.report [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 
0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1262.557772] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44acd035-33db-4f51-9b77-99f7c8f7116d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.564755] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e912d34c-e49e-461d-bf0e-fea946c07cac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Suspending the VM {{(pid=63538) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 1262.565050] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-73a3b626-3f95-421d-a9d5-6bf108d99759 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.571576] env[63538]: DEBUG oslo_vmware.api [None req-e912d34c-e49e-461d-bf0e-fea946c07cac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1262.571576] env[63538]: value = "task-5101981" [ 1262.571576] env[63538]: _type = "Task" [ 1262.571576] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.581044] env[63538]: DEBUG oslo_vmware.api [None req-e912d34c-e49e-461d-bf0e-fea946c07cac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101981, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.855764] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.203s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.856257] env[63538]: INFO nova.compute.manager [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Migrating [ 1262.867783] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.460s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.868163] env[63538]: DEBUG nova.objects.instance [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'resources' on Instance uuid f0183c1f-4557-45fd-ba65-4821ef661173 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1263.082856] env[63538]: DEBUG oslo_vmware.api [None req-e912d34c-e49e-461d-bf0e-fea946c07cac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101981, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.383070] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "refresh_cache-afa669ca-26b3-4b9d-ac9d-abbc966d5798" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1263.383070] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquired lock "refresh_cache-afa669ca-26b3-4b9d-ac9d-abbc966d5798" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1263.383070] env[63538]: DEBUG nova.network.neutron [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1263.486094] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87120326-36ac-47cb-ac29-97c93f8ca1f8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.493910] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de86827f-d3c4-4333-86e1-56327d1e6e92 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.526234] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0256f32-e560-483a-afe8-52819a0686e4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.534104] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac38a090-91c8-4846-8b22-adbfb3fa89a1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.547439] env[63538]: DEBUG nova.compute.provider_tree [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1263.582593] env[63538]: DEBUG oslo_vmware.api [None req-e912d34c-e49e-461d-bf0e-fea946c07cac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101981, 'name': SuspendVM_Task, 'duration_secs': 0.663325} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.582885] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e912d34c-e49e-461d-bf0e-fea946c07cac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Suspended the VM {{(pid=63538) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 1263.583114] env[63538]: DEBUG nova.compute.manager [None req-e912d34c-e49e-461d-bf0e-fea946c07cac tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1263.583869] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f40cac-c399-43f6-b9d0-c7e64f85ce40 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.050407] env[63538]: DEBUG nova.scheduler.client.report [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1264.130627] env[63538]: DEBUG nova.network.neutron [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updating instance_info_cache with network_info: [{"id": "ec3a3da3-8713-4358-bd22-4e497a90c904", "address": "fa:16:3e:76:73:41", "network": {"id": "4d592811-793b-45d6-a6d3-fff8ca7bbf30", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-654803980-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df090f9a727d4cf4a0f466e27928bdc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec3a3da3-87", "ovs_interfaceid": "ec3a3da3-8713-4358-bd22-4e497a90c904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.298260] env[63538]: DEBUG oslo_concurrency.lockutils [None 
req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "c1766d8e-7949-4fa8-a762-007d016a4de1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1264.298744] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "c1766d8e-7949-4fa8-a762-007d016a4de1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.299091] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "c1766d8e-7949-4fa8-a762-007d016a4de1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1264.299372] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "c1766d8e-7949-4fa8-a762-007d016a4de1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.299628] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "c1766d8e-7949-4fa8-a762-007d016a4de1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.302445] env[63538]: INFO nova.compute.manager [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Terminating instance [ 1264.306869] env[63538]: DEBUG nova.compute.manager [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1264.307163] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1264.308384] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31aef57-a9f1-46bc-9597-c385c18cd4ef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.319016] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1264.319362] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07bfcda9-2e5a-4758-a94f-cd4fb7586d59 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.326377] env[63538]: DEBUG oslo_vmware.api [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1264.326377] env[63538]: value = "task-5101982" [ 1264.326377] env[63538]: _type = "Task" [ 1264.326377] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.335219] env[63538]: DEBUG oslo_vmware.api [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101982, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.554740] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.687s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.577441] env[63538]: INFO nova.scheduler.client.report [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleted allocations for instance f0183c1f-4557-45fd-ba65-4821ef661173 [ 1264.633341] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Releasing lock "refresh_cache-afa669ca-26b3-4b9d-ac9d-abbc966d5798" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1264.920148] env[63538]: DEBUG oslo_vmware.api [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101982, 'name': PowerOffVM_Task, 'duration_secs': 0.181314} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.920148] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1264.920148] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1264.920148] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61ae42e2-aaae-4df6-bcb2-91e5859714c2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.920148] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1264.920148] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1264.920148] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 
tempest-AttachVolumeNegativeTest-1475514698-project-member] Deleting the datastore file [datastore1] c1766d8e-7949-4fa8-a762-007d016a4de1 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1264.920148] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95f2e1c4-3fd6-4060-8226-9cbaae90f733 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.920148] env[63538]: DEBUG oslo_vmware.api [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1264.920148] env[63538]: value = "task-5101984" [ 1264.920148] env[63538]: _type = "Task" [ 1264.920148] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.928295] env[63538]: DEBUG oslo_vmware.api [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101984, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.936907] env[63538]: INFO nova.compute.manager [None req-8b7f3825-f9e2-4bb3-8527-b2d9b39c8c04 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Resuming [ 1264.937598] env[63538]: DEBUG nova.objects.instance [None req-8b7f3825-f9e2-4bb3-8527-b2d9b39c8c04 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lazy-loading 'flavor' on Instance uuid 057f192d-b470-4683-b197-913457d10717 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1265.089083] env[63538]: DEBUG oslo_concurrency.lockutils [None req-1a569a45-79e8-40be-a8e9-3737b15e82c8 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "f0183c1f-4557-45fd-ba65-4821ef661173" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.863s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1265.431063] env[63538]: DEBUG oslo_vmware.api [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101984, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144286} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.431063] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1265.431063] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1265.431341] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1265.431451] env[63538]: INFO nova.compute.manager [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1265.431668] env[63538]: DEBUG oslo.service.loopingcall [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1265.432031] env[63538]: DEBUG nova.compute.manager [-] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1265.432031] env[63538]: DEBUG nova.network.neutron [-] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1265.946935] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8b7f3825-f9e2-4bb3-8527-b2d9b39c8c04 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1265.946935] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8b7f3825-f9e2-4bb3-8527-b2d9b39c8c04 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquired lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.947215] env[63538]: DEBUG nova.network.neutron [None req-8b7f3825-f9e2-4bb3-8527-b2d9b39c8c04 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1265.953940] env[63538]: DEBUG nova.compute.manager [req-c4e49cc6-3868-432b-8cda-8bfe014c05fc req-a0e19bb9-d67d-4c0d-8de4-4e7fb91046c5 service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Received event network-vif-deleted-632e844e-c3bd-4596-8708-86ed6e0abef8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1265.954297] env[63538]: INFO nova.compute.manager [req-c4e49cc6-3868-432b-8cda-8bfe014c05fc req-a0e19bb9-d67d-4c0d-8de4-4e7fb91046c5 service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Neutron deleted interface 632e844e-c3bd-4596-8708-86ed6e0abef8; detaching it from the instance and deleting it from the info cache [ 1265.954495] env[63538]: DEBUG nova.network.neutron [req-c4e49cc6-3868-432b-8cda-8bfe014c05fc req-a0e19bb9-d67d-4c0d-8de4-4e7fb91046c5 service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.048157] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "d6215939-5e06-425d-b947-224eebb8386b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.048412] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "d6215939-5e06-425d-b947-224eebb8386b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1266.147798] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ffae3e-a731-4ed0-acb6-4238a1a69005 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.167143] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updating instance 'afa669ca-26b3-4b9d-ac9d-abbc966d5798' progress to 0 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1266.424671] env[63538]: DEBUG nova.network.neutron [-] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.457259] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-398b160c-1e74-4d6f-8394-f2f81ea4393c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.467918] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66556886-994c-4525-b365-c073d2b7fc00 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.501938] env[63538]: DEBUG nova.compute.manager [req-c4e49cc6-3868-432b-8cda-8bfe014c05fc req-a0e19bb9-d67d-4c0d-8de4-4e7fb91046c5 service nova] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Detach interface failed, port_id=632e844e-c3bd-4596-8708-86ed6e0abef8, reason: Instance c1766d8e-7949-4fa8-a762-007d016a4de1 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1266.550801] env[63538]: DEBUG nova.compute.manager [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1266.673222] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1266.673524] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93a63590-05e1-479f-806f-9370728d150f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.676569] env[63538]: DEBUG nova.network.neutron [None req-8b7f3825-f9e2-4bb3-8527-b2d9b39c8c04 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Updating instance_info_cache with network_info: [{"id": "3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1", "address": "fa:16:3e:73:08:09", "network": {"id": "28598c3b-2c91-4ca8-847e-240c6c3831dc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2040762927-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6954a5254f441ca256c85330297cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f39e3b37-7906-4bbc-820e-ceac74e4d827", "external-id": "nsx-vlan-transportzone-328", "segmentation_id": 328, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cef0125-1c", "ovs_interfaceid": "3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.684565] env[63538]: DEBUG oslo_vmware.api [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1266.684565] env[63538]: value = "task-5101985" [ 1266.684565] env[63538]: _type = "Task" [ 1266.684565] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.698134] env[63538]: DEBUG oslo_vmware.api [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101985, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.927788] env[63538]: INFO nova.compute.manager [-] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Took 1.50 seconds to deallocate network for instance. 
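(Illustrative aside, not part of the captured log.) The PowerOnVM_Task, SuspendVM_Task, PowerOffVM_Task and DeleteDatastoreFile_Task entries above all follow the same wait-for-task loop: the task is polled at a fixed interval, its progress is logged ("progress is 0%", "progress is 62%", ...), and the loop ends with a "completed successfully" record once vCenter reports a terminal state. The self-contained Python sketch below mirrors that polling loop under stated assumptions; the Task class and poll interval are invented stand-ins for the vCenter round-trip, and nothing here is oslo.vmware source.

import time
from dataclasses import dataclass


@dataclass
class Task:
    task_id: str
    name: str
    progress: int = 0

    def refresh(self):
        # Stand-in for one PropertyCollector round-trip against vCenter.
        self.progress = min(100, self.progress + 50)


def wait_for_task(task, poll_interval=0.5):
    # Poll until the task reports completion, logging progress on each pass,
    # similar to the _poll_task records in the log above.
    while True:
        task.refresh()
        print(f"Task: {{'id': {task.task_id!r}, 'name': {task.name!r}}} "
              f"progress is {task.progress}%")
        if task.progress >= 100:
            return task
        time.sleep(poll_interval)


wait_for_task(Task("task-5101984", "DeleteDatastoreFile_Task"))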
[ 1267.075805] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.076079] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1267.077823] env[63538]: INFO nova.compute.claims [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1267.178899] env[63538]: DEBUG oslo_concurrency.lockutils [None req-8b7f3825-f9e2-4bb3-8527-b2d9b39c8c04 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Releasing lock "refresh_cache-057f192d-b470-4683-b197-913457d10717" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1267.179853] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6775c7a-1c59-4448-8161-8e40431fe134 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.186904] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7f3825-f9e2-4bb3-8527-b2d9b39c8c04 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Resuming the VM {{(pid=63538) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1184}} [ 1267.190170] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d45d1b0-510a-49b0-b4cc-7fbb3aaa1766 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.197036] env[63538]: DEBUG oslo_vmware.api [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101985, 'name': PowerOffVM_Task, 'duration_secs': 0.168674} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.198280] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1267.198496] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updating instance 'afa669ca-26b3-4b9d-ac9d-abbc966d5798' progress to 17 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1267.202521] env[63538]: DEBUG oslo_vmware.api [None req-8b7f3825-f9e2-4bb3-8527-b2d9b39c8c04 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1267.202521] env[63538]: value = "task-5101986" [ 1267.202521] env[63538]: _type = "Task" [ 1267.202521] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.211643] env[63538]: DEBUG oslo_vmware.api [None req-8b7f3825-f9e2-4bb3-8527-b2d9b39c8c04 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101986, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.434944] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.706750] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1267.707126] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1267.707437] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image limits 0:0:0 {{(pid=63538) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1267.707909] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1267.708223] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1267.708458] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1267.708702] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1267.708886] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1267.709082] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1267.709286] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1267.709488] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1267.714989] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcc5b50b-3113-42ed-a6c6-8841b0971bde {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.744513] env[63538]: DEBUG oslo_vmware.api [None req-8b7f3825-f9e2-4bb3-8527-b2d9b39c8c04 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101986, 'name': PowerOnVM_Task, 'duration_secs': 0.514457} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.746312] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7f3825-f9e2-4bb3-8527-b2d9b39c8c04 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Resumed the VM {{(pid=63538) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1189}} [ 1267.746530] env[63538]: DEBUG nova.compute.manager [None req-8b7f3825-f9e2-4bb3-8527-b2d9b39c8c04 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1267.746894] env[63538]: DEBUG oslo_vmware.api [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1267.746894] env[63538]: value = "task-5101987" [ 1267.746894] env[63538]: _type = "Task" [ 1267.746894] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.747747] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f419e2-58f3-440d-8cbb-7a0e5853d0cf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.764114] env[63538]: DEBUG oslo_vmware.api [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101987, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.208679] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f390053-256f-4fff-a4bd-3973b7fc544c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.217477] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9854080d-ff85-41a8-830c-acab8f33b56e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.250363] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab75b68-92cc-4347-993f-dae4d95b38c2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.266012] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801b8481-9855-477d-9036-a386b318a876 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.274638] env[63538]: DEBUG oslo_vmware.api [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101987, 'name': ReconfigVM_Task, 'duration_secs': 0.300308} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.276441] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updating instance 'afa669ca-26b3-4b9d-ac9d-abbc966d5798' progress to 33 {{(pid=63538) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1268.297764] env[63538]: DEBUG nova.compute.provider_tree [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.658789] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "057f192d-b470-4683-b197-913457d10717" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1268.659110] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "057f192d-b470-4683-b197-913457d10717" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.659344] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "057f192d-b470-4683-b197-913457d10717-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1268.659551] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "057f192d-b470-4683-b197-913457d10717-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.659767] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "057f192d-b470-4683-b197-913457d10717-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1268.662060] env[63538]: INFO nova.compute.manager [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Terminating instance [ 1268.663884] env[63538]: DEBUG nova.compute.manager [None 
req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1268.664105] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1268.664926] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde89001-d422-4724-b4ca-fadd4a161450 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.672936] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1268.673187] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9242ffc3-a211-42cf-aad5-08b2d3e1315c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.679778] env[63538]: DEBUG oslo_vmware.api [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1268.679778] env[63538]: value = "task-5101988" [ 1268.679778] env[63538]: _type = "Task" [ 1268.679778] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.687795] env[63538]: DEBUG oslo_vmware.api [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101988, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.785776] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1268.786096] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1268.786300] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1268.786525] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1268.786732] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1268.786924] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1268.787218] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1268.787420] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1268.787724] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Got 1 possible topologies 
{{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1268.787950] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1268.788198] env[63538]: DEBUG nova.virt.hardware [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1268.800586] env[63538]: DEBUG nova.scheduler.client.report [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1269.189875] env[63538]: DEBUG oslo_vmware.api [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101988, 'name': PowerOffVM_Task, 'duration_secs': 0.186201} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.190176] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1269.190353] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1269.190606] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31fbc2e6-1370-4916-990c-c095d23ba679 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.252859] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1269.253288] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1269.253405] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleting the datastore file [datastore1] 057f192d-b470-4683-b197-913457d10717 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1269.253681] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-643dfd82-c4f4-4bc4-aeea-2f87e0d23635 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.260732] env[63538]: DEBUG oslo_vmware.api [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for the task: (returnval){ [ 1269.260732] env[63538]: value = "task-5101990" [ 1269.260732] env[63538]: _type = "Task" [ 1269.260732] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.270280] env[63538]: DEBUG oslo_vmware.api [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101990, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.297289] env[63538]: ERROR nova.compute.manager [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Setting instance vm_state to ERROR: AttributeError: 'NoneType' object has no attribute 'key' [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Traceback (most recent call last): [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] File "/opt/stack/nova/nova/compute/manager.py", line 10954, in _error_out_instance_on_exception [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] yield [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] File "/opt/stack/nova/nova/compute/manager.py", line 6151, in _resize_instance [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] disk_info = self.driver.migrate_disk_and_power_off( [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 264, in migrate_disk_and_power_off [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] return self._vmops.migrate_disk_and_power_off(context, instance, [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1467, in migrate_disk_and_power_off [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] self._resize_disk(instance, vm_ref, vmdk, flavor) [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1398, in _resize_disk [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] self._volumeops.detach_disk_from_vm(vm_ref, instance, vmdk.device) [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 121, in detach_disk_from_vm [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] disk_key = device.key [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] AttributeError: 'NoneType' object has no attribute 'key' [ 1269.297289] env[63538]: ERROR nova.compute.manager [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] [ 1269.305387] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1269.305963] env[63538]: DEBUG nova.compute.manager [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: 
d6215939-5e06-425d-b947-224eebb8386b] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1269.308915] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.874s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1269.309210] env[63538]: DEBUG nova.objects.instance [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lazy-loading 'resources' on Instance uuid c1766d8e-7949-4fa8-a762-007d016a4de1 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1269.770438] env[63538]: DEBUG oslo_vmware.api [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Task: {'id': task-5101990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155764} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.770702] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1269.770899] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1269.771096] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1269.771300] env[63538]: INFO nova.compute.manager [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] [instance: 057f192d-b470-4683-b197-913457d10717] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1269.771607] env[63538]: DEBUG oslo.service.loopingcall [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1269.771816] env[63538]: DEBUG nova.compute.manager [-] [instance: 057f192d-b470-4683-b197-913457d10717] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1269.771914] env[63538]: DEBUG nova.network.neutron [-] [instance: 057f192d-b470-4683-b197-913457d10717] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1269.812123] env[63538]: DEBUG nova.compute.utils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1269.813633] env[63538]: DEBUG nova.compute.manager [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1269.813816] env[63538]: DEBUG nova.network.neutron [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1269.820377] env[63538]: INFO nova.compute.manager [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Swapping old allocation on dict_keys(['f65218a4-1d3d-476a-9093-01cae92c8635']) held by migration 9ec5125d-05d3-4a74-ada6-46cf08ebfece for instance [ 1269.841136] env[63538]: DEBUG nova.scheduler.client.report [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Overwriting current allocation {'allocations': {'f65218a4-1d3d-476a-9093-01cae92c8635': {'resources': {'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 174}}, 'project_id': 'df090f9a727d4cf4a0f466e27928bdc6', 'user_id': 'f315670d336b49d6a732297656ce515a', 'consumer_generation': 1} on consumer afa669ca-26b3-4b9d-ac9d-abbc966d5798 {{(pid=63538) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1269.866797] env[63538]: DEBUG nova.policy [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ad1bddeca5346dea39d23339e09db3d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a06b7cc1ab24ba584bbe970e4fc5e81', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1269.920539] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d462b664-06f3-4c93-8ed5-3bd1b5eb8093 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.929049] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c37ecc-40d5-4ef7-9adc-e032f4202609 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.959526] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67e4ecd-0216-417f-b227-8c8f4694188e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.967595] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9429738d-6ef1-414a-a99b-dcaacfe5f9d8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.982860] env[63538]: DEBUG nova.compute.provider_tree [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1270.147999] env[63538]: DEBUG nova.network.neutron [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Successfully created port: 5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1270.221883] env[63538]: DEBUG nova.compute.manager [req-5da0cb4d-23da-4bfc-a99c-ddb0b1cc8306 req-277771b9-0f19-452d-a3a0-bdce019f709a service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Received event network-vif-deleted-3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1270.222164] env[63538]: INFO nova.compute.manager [req-5da0cb4d-23da-4bfc-a99c-ddb0b1cc8306 req-277771b9-0f19-452d-a3a0-bdce019f709a service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Neutron deleted interface 3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1; detaching it from the instance and deleting it from the info cache [ 1270.222296] env[63538]: DEBUG nova.network.neutron [req-5da0cb4d-23da-4bfc-a99c-ddb0b1cc8306 req-277771b9-0f19-452d-a3a0-bdce019f709a service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.317030] env[63538]: DEBUG nova.compute.manager [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1270.485676] env[63538]: DEBUG nova.scheduler.client.report [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1270.692564] env[63538]: DEBUG nova.network.neutron [-] [instance: 057f192d-b470-4683-b197-913457d10717] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.725644] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17e71dd1-1521-4771-a902-2e8ae69c75b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.735782] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a92b563-3610-4444-ba45-9d3c24b2d8c4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.767735] env[63538]: DEBUG nova.compute.manager [req-5da0cb4d-23da-4bfc-a99c-ddb0b1cc8306 req-277771b9-0f19-452d-a3a0-bdce019f709a service nova] [instance: 057f192d-b470-4683-b197-913457d10717] Detach interface failed, port_id=3cef0125-1cc7-4d4b-8c6a-2753ca19d0e1, reason: Instance 057f192d-b470-4683-b197-913457d10717 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1270.991175] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.682s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.015460] env[63538]: INFO nova.scheduler.client.report [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Deleted allocations for instance c1766d8e-7949-4fa8-a762-007d016a4de1 [ 1271.061852] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.062214] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.062406] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.062592] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.062789] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.065190] env[63538]: INFO nova.compute.manager [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Terminating instance [ 1271.067920] env[63538]: DEBUG nova.compute.manager [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 
tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1271.068147] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1271.068645] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-abab037e-61e7-421e-97c5-e119a2186b8e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.076736] env[63538]: DEBUG oslo_vmware.api [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1271.076736] env[63538]: value = "task-5101991" [ 1271.076736] env[63538]: _type = "Task" [ 1271.076736] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.084949] env[63538]: DEBUG oslo_vmware.api [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101991, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.195089] env[63538]: INFO nova.compute.manager [-] [instance: 057f192d-b470-4683-b197-913457d10717] Took 1.42 seconds to deallocate network for instance. [ 1271.325876] env[63538]: DEBUG nova.compute.manager [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1271.351967] env[63538]: DEBUG nova.virt.hardware [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1271.352267] env[63538]: DEBUG nova.virt.hardware [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1271.352433] env[63538]: DEBUG nova.virt.hardware [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1271.352626] env[63538]: DEBUG nova.virt.hardware [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1271.352782] env[63538]: DEBUG nova.virt.hardware [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1271.352935] env[63538]: DEBUG nova.virt.hardware [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1271.353166] env[63538]: DEBUG nova.virt.hardware [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1271.353334] env[63538]: DEBUG nova.virt.hardware [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1271.353508] env[63538]: DEBUG 
nova.virt.hardware [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1271.353681] env[63538]: DEBUG nova.virt.hardware [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1271.353862] env[63538]: DEBUG nova.virt.hardware [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1271.354744] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655202f9-c537-47be-acf5-f5b250a9beff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.363019] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b37733f-5757-453d-b770-bb482bf2b84a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.405984] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.405984] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.493912] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd1d3b6-f579-4663-8814-aeb29662ec0d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.502720] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1997927e-eda0-4f2c-b532-ad8e7111a4de {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.536530] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d3c72d-87d3-41b9-af09-5a79cef56c0a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.539871] env[63538]: DEBUG oslo_concurrency.lockutils [None req-e3b3a0be-a367-4ce2-ba56-4bfc044da1cf tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "c1766d8e-7949-4fa8-a762-007d016a4de1" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.241s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.546699] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830c7469-b437-4e67-9258-e7e310006f2d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.562205] env[63538]: DEBUG nova.compute.provider_tree [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.587357] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1271.587773] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Volume detach. Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1271.587871] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992559', 'volume_id': 'f4f9e20f-58de-40de-b281-a78c4a4444b2', 'name': 'volume-f4f9e20f-58de-40de-b281-a78c4a4444b2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'afa669ca-26b3-4b9d-ac9d-abbc966d5798', 'attached_at': '', 'detached_at': '', 'volume_id': 'f4f9e20f-58de-40de-b281-a78c4a4444b2', 'serial': 'f4f9e20f-58de-40de-b281-a78c4a4444b2'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1271.588677] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9cfd16-ae60-4319-bdaa-92cf00298491 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.608267] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf162c7a-c0a7-4b62-bfdf-27fbaaf20780 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.615639] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d2bc86-aaae-44b2-96dc-a1f9f12c63f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.633770] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a77c70e-732d-4f66-9b3a-90fe80e7df64 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1271.649642] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] The volume has not been displaced from its original location: [datastore2] volume-f4f9e20f-58de-40de-b281-a78c4a4444b2/volume-f4f9e20f-58de-40de-b281-a78c4a4444b2.vmdk. No consolidation needed. {{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1271.654962] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1271.655360] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0962cc7b-8932-4d82-a8c5-d6b9416b0144 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.674159] env[63538]: DEBUG oslo_vmware.api [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1271.674159] env[63538]: value = "task-5101992" [ 1271.674159] env[63538]: _type = "Task" [ 1271.674159] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.683999] env[63538]: DEBUG oslo_vmware.api [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101992, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.702513] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.709676] env[63538]: DEBUG nova.compute.manager [req-f51c5857-a980-4da2-b33e-83d9c23d996a req-c0742a79-40c9-4957-b145-1d30a7d05194 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Received event network-vif-plugged-5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1271.709903] env[63538]: DEBUG oslo_concurrency.lockutils [req-f51c5857-a980-4da2-b33e-83d9c23d996a req-c0742a79-40c9-4957-b145-1d30a7d05194 service nova] Acquiring lock "d6215939-5e06-425d-b947-224eebb8386b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.710127] env[63538]: DEBUG oslo_concurrency.lockutils [req-f51c5857-a980-4da2-b33e-83d9c23d996a req-c0742a79-40c9-4957-b145-1d30a7d05194 service nova] Lock "d6215939-5e06-425d-b947-224eebb8386b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.710454] env[63538]: DEBUG oslo_concurrency.lockutils [req-f51c5857-a980-4da2-b33e-83d9c23d996a req-c0742a79-40c9-4957-b145-1d30a7d05194 service nova] Lock "d6215939-5e06-425d-b947-224eebb8386b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.711412] env[63538]: DEBUG nova.compute.manager [req-f51c5857-a980-4da2-b33e-83d9c23d996a req-c0742a79-40c9-4957-b145-1d30a7d05194 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] No waiting events found dispatching network-vif-plugged-5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1271.711412] env[63538]: WARNING nova.compute.manager [req-f51c5857-a980-4da2-b33e-83d9c23d996a req-c0742a79-40c9-4957-b145-1d30a7d05194 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Received unexpected event network-vif-plugged-5f276ada-dc8e-4558-a169-bfcaf25172e8 for instance with vm_state building and task_state spawning. 
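Each "Waiting for the task: … value = task-NNNNNNN … to complete" record, followed by "progress is N%" and eventually "completed successfully", is the driver polling a vCenter task. The loop below is a self-contained illustration of that shape only; it is not oslo.vmware's implementation (which drives the poll through its own looping-call machinery), and `poll` is a hypothetical callable introduced for this sketch.

```python
# Illustrative poll-until-done loop in the spirit of the
# "Waiting for the task ... progress is N% ... completed successfully" records.
# Not oslo.vmware code; `poll` is a hypothetical callable for this sketch.
import time
from typing import Callable, Dict


def wait_for_task(poll: Callable[[], Dict], interval: float = 0.5,
                  timeout: float = 60.0) -> Dict:
    """Poll until the task reports success, error, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError('task did not complete in time')


if __name__ == '__main__':
    # Fake task that finishes on the third poll.
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 50},
                   {'state': 'success'}])
    print(wait_for_task(lambda: next(states), interval=0.01))
```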
[ 1271.802745] env[63538]: DEBUG nova.network.neutron [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Successfully updated port: 5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1272.065740] env[63538]: DEBUG nova.scheduler.client.report [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1272.184307] env[63538]: DEBUG oslo_vmware.api [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101992, 'name': ReconfigVM_Task, 'duration_secs': 0.175686} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.184592] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1272.189535] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d20916be-7b1f-4dc4-b1ad-3de9a20032a8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.204183] env[63538]: DEBUG oslo_vmware.api [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1272.204183] env[63538]: value = "task-5101993" [ 1272.204183] env[63538]: _type = "Task" [ 1272.204183] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.212265] env[63538]: DEBUG oslo_vmware.api [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101993, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.305655] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1272.305859] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.305997] env[63538]: DEBUG nova.network.neutron [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1272.407434] env[63538]: DEBUG oslo_concurrency.lockutils [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.557952] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.557952] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.570104] env[63538]: DEBUG oslo_concurrency.lockutils [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.165s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.570319] env[63538]: INFO nova.compute.manager [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Successfully reverted task state from resize_migrating on failure for instance. 
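The inventory dict reported for provider f65218a4-1d3d-476a-9093-01cae92c8635 maps to schedulable capacity as (total - reserved) * allocation_ratio per resource class, which is why this 48-vCPU host can back 192 vCPUs of instances while memory and disk are not overcommitted. A small sketch with the numbers copied from the log (max_unit and step_size are omitted for brevity):

```python
# Capacity math for the inventory logged above: Placement allows allocations
# up to (total - reserved) * allocation_ratio per resource class.
INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}


def usable_capacity(inventory: dict) -> dict:
    """Return how much of each resource class the scheduler may allocate."""
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inventory.items()}


if __name__ == '__main__':
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 200.0}
    print(usable_capacity(INVENTORY))
```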
[ 1272.579292] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.877s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.579529] env[63538]: DEBUG nova.objects.instance [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lazy-loading 'resources' on Instance uuid 057f192d-b470-4683-b197-913457d10717 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server [None req-bd9f9e25-7a4f-484b-a88a-93d6e251e526 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Exception during message handling: AttributeError: 'NoneType' object has no attribute 'key' [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server raise self.value [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server raise self.value [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server raise self.value [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6117, in resize_instance [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server raise self.value [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6114, in resize_instance [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server self._resize_instance(context, instance, image, migration, [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6151, in _resize_instance [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 264, in migrate_disk_and_power_off [ 1272.581566] env[63538]: ERROR oslo_messaging.rpc.server return self._vmops.migrate_disk_and_power_off(context, instance, [ 1272.583170] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1467, in migrate_disk_and_power_off [ 1272.583170] env[63538]: ERROR oslo_messaging.rpc.server self._resize_disk(instance, vm_ref, vmdk, flavor) [ 1272.583170] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1398, in _resize_disk [ 
1272.583170] env[63538]: ERROR oslo_messaging.rpc.server self._volumeops.detach_disk_from_vm(vm_ref, instance, vmdk.device) [ 1272.583170] env[63538]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 121, in detach_disk_from_vm [ 1272.583170] env[63538]: ERROR oslo_messaging.rpc.server disk_key = device.key [ 1272.583170] env[63538]: ERROR oslo_messaging.rpc.server AttributeError: 'NoneType' object has no attribute 'key' [ 1272.583170] env[63538]: ERROR oslo_messaging.rpc.server [ 1272.715232] env[63538]: DEBUG oslo_vmware.api [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101993, 'name': ReconfigVM_Task, 'duration_secs': 0.118982} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.715569] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992559', 'volume_id': 'f4f9e20f-58de-40de-b281-a78c4a4444b2', 'name': 'volume-f4f9e20f-58de-40de-b281-a78c4a4444b2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'afa669ca-26b3-4b9d-ac9d-abbc966d5798', 'attached_at': '', 'detached_at': '', 'volume_id': 'f4f9e20f-58de-40de-b281-a78c4a4444b2', 'serial': 'f4f9e20f-58de-40de-b281-a78c4a4444b2'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1272.715856] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1272.716962] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783d56d9-eab7-40ec-88e9-dff931a2c27f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.723693] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1272.723931] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a1288bf-ab49-460c-b565-6d6e7e85722a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.792935] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1272.793204] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 
tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1272.793392] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleting the datastore file [datastore2] afa669ca-26b3-4b9d-ac9d-abbc966d5798 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1272.793674] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a58e69c4-b05e-435f-b252-50b7d0f889e0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.801769] env[63538]: DEBUG oslo_vmware.api [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1272.801769] env[63538]: value = "task-5101995" [ 1272.801769] env[63538]: _type = "Task" [ 1272.801769] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.812228] env[63538]: DEBUG oslo_vmware.api [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101995, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.855925] env[63538]: DEBUG nova.network.neutron [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1273.001536] env[63538]: DEBUG nova.network.neutron [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updating instance_info_cache with network_info: [{"id": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "address": "fa:16:3e:59:b6:e8", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f276ada-dc", "ovs_interfaceid": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1273.060757] env[63538]: INFO nova.compute.manager [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Detaching volume f5c40b11-75f1-482f-aa80-8e412c228ea5 [ 1273.097954] env[63538]: INFO nova.virt.block_device [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Attempting to driver detach volume f5c40b11-75f1-482f-aa80-8e412c228ea5 from mountpoint /dev/sdb [ 1273.098234] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Volume detach. 
Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1273.098430] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992551', 'volume_id': 'f5c40b11-75f1-482f-aa80-8e412c228ea5', 'name': 'volume-f5c40b11-75f1-482f-aa80-8e412c228ea5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e3feec17-ca1b-4873-bb0a-370c3868aabf', 'attached_at': '', 'detached_at': '', 'volume_id': 'f5c40b11-75f1-482f-aa80-8e412c228ea5', 'serial': 'f5c40b11-75f1-482f-aa80-8e412c228ea5'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1273.099438] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb772ae-23fc-4f93-ac6f-5727a410e91a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.129871] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3846597-470c-41fe-a063-9fa1ab8778d6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.138666] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be755742-7d46-42fe-9d45-c180ca1cc41b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.167242] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d323f35-6b2c-4fd9-856e-43bb25ece53d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.184399] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] The volume has not been displaced from its original location: [datastore1] volume-f5c40b11-75f1-482f-aa80-8e412c228ea5/volume-f5c40b11-75f1-482f-aa80-8e412c228ea5.vmdk. No consolidation needed. 
{{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1273.189915] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Reconfiguring VM instance instance-00000070 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1273.192739] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09d60d31-2f6e-472f-90a9-b80102662978 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.218727] env[63538]: DEBUG oslo_vmware.api [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1273.218727] env[63538]: value = "task-5101996" [ 1273.218727] env[63538]: _type = "Task" [ 1273.218727] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.234113] env[63538]: DEBUG oslo_vmware.api [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101996, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.235690] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b907f8c1-b772-4897-b479-93cabe158410 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.243997] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0f0936-f6d9-4c1f-a106-075ad520371a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.276478] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f333ca-d3bb-44a0-baf2-87f72cb1ef21 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.285338] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d9d297-41bf-4c35-8f98-2410bc5ec5ae {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.302013] env[63538]: DEBUG nova.compute.provider_tree [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1273.311927] env[63538]: DEBUG oslo_vmware.api [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101995, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084674} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.312810] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1273.313033] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1273.313224] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1273.313439] env[63538]: INFO nova.compute.manager [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Took 2.25 seconds to destroy the instance on the hypervisor. [ 1273.313678] env[63538]: DEBUG oslo.service.loopingcall [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1273.314135] env[63538]: DEBUG nova.compute.manager [-] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1273.314239] env[63538]: DEBUG nova.network.neutron [-] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1273.504729] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1273.505175] env[63538]: DEBUG nova.compute.manager [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Instance network_info: |[{"id": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "address": "fa:16:3e:59:b6:e8", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f276ada-dc", "ovs_interfaceid": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1273.505783] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:b6:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31264e2-3e0a-4dfb-ba1f-6389d7d47548', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f276ada-dc8e-4558-a169-bfcaf25172e8', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1273.513791] env[63538]: DEBUG oslo.service.loopingcall [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1273.514040] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6215939-5e06-425d-b947-224eebb8386b] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1273.514303] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0550fbd8-b072-461c-988d-bcb76a36bd7d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.534544] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1273.534544] env[63538]: value = "task-5101997" [ 1273.534544] env[63538]: _type = "Task" [ 1273.534544] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.542793] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101997, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.713602] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "e93aab2e-f8c4-4959-923f-0449a84108d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1273.713896] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "e93aab2e-f8c4-4959-923f-0449a84108d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1273.715104] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "e93aab2e-f8c4-4959-923f-0449a84108d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1273.715104] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "e93aab2e-f8c4-4959-923f-0449a84108d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1273.715104] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "e93aab2e-f8c4-4959-923f-0449a84108d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1273.716928] env[63538]: INFO nova.compute.manager [None req-b8b1d557-05e2-487e-892f-c26d276d4112 
tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Terminating instance [ 1273.719278] env[63538]: DEBUG nova.compute.manager [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1273.719590] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1273.720426] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559472de-70b8-4f08-b8ab-7ee87070961e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.733437] env[63538]: DEBUG oslo_vmware.api [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101996, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.736882] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1273.737344] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37becdb7-7b34-4464-a139-785ebe894a3d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.746210] env[63538]: DEBUG nova.compute.manager [req-f073007f-6065-4435-8a5f-ae436cf797b4 req-0be848a5-994b-4d70-b7ec-b961faabd778 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Received event network-changed-5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1273.746429] env[63538]: DEBUG nova.compute.manager [req-f073007f-6065-4435-8a5f-ae436cf797b4 req-0be848a5-994b-4d70-b7ec-b961faabd778 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Refreshing instance network info cache due to event network-changed-5f276ada-dc8e-4558-a169-bfcaf25172e8. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1273.746657] env[63538]: DEBUG oslo_concurrency.lockutils [req-f073007f-6065-4435-8a5f-ae436cf797b4 req-0be848a5-994b-4d70-b7ec-b961faabd778 service nova] Acquiring lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1273.746978] env[63538]: DEBUG oslo_concurrency.lockutils [req-f073007f-6065-4435-8a5f-ae436cf797b4 req-0be848a5-994b-4d70-b7ec-b961faabd778 service nova] Acquired lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.746978] env[63538]: DEBUG nova.network.neutron [req-f073007f-6065-4435-8a5f-ae436cf797b4 req-0be848a5-994b-4d70-b7ec-b961faabd778 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Refreshing network info cache for port 5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1273.749310] env[63538]: DEBUG oslo_vmware.api [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1273.749310] env[63538]: value = "task-5101998" [ 1273.749310] env[63538]: _type = "Task" [ 1273.749310] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.759711] env[63538]: DEBUG oslo_vmware.api [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101998, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.807801] env[63538]: DEBUG nova.scheduler.client.report [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1274.045731] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101997, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.233769] env[63538]: DEBUG oslo_vmware.api [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101996, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.241305] env[63538]: DEBUG nova.network.neutron [-] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.264187] env[63538]: DEBUG oslo_vmware.api [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101998, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.314980] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.733s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.336136] env[63538]: INFO nova.scheduler.client.report [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Deleted allocations for instance 057f192d-b470-4683-b197-913457d10717 [ 1274.546145] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5101997, 'name': CreateVM_Task, 'duration_secs': 0.745817} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.546548] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6215939-5e06-425d-b947-224eebb8386b] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1274.547077] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1274.547278] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.547711] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1274.547992] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6ccde2d-9080-4b33-ac54-f5ce44bf2ece {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.554629] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 
tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1274.554629] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520ed035-97e3-17e2-ae26-66d66eec1ec3" [ 1274.554629] env[63538]: _type = "Task" [ 1274.554629] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.566417] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520ed035-97e3-17e2-ae26-66d66eec1ec3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.695259] env[63538]: DEBUG nova.network.neutron [req-f073007f-6065-4435-8a5f-ae436cf797b4 req-0be848a5-994b-4d70-b7ec-b961faabd778 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updated VIF entry in instance network info cache for port 5f276ada-dc8e-4558-a169-bfcaf25172e8. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1274.695755] env[63538]: DEBUG nova.network.neutron [req-f073007f-6065-4435-8a5f-ae436cf797b4 req-0be848a5-994b-4d70-b7ec-b961faabd778 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updating instance_info_cache with network_info: [{"id": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "address": "fa:16:3e:59:b6:e8", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f276ada-dc", "ovs_interfaceid": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.733274] env[63538]: DEBUG oslo_vmware.api [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101996, 'name': ReconfigVM_Task, 'duration_secs': 1.251096} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.733488] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Reconfigured VM instance instance-00000070 to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1274.738251] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-357c3cc9-84d7-425f-9a39-dd13ab7d4db3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.748579] env[63538]: INFO nova.compute.manager [-] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Took 1.43 seconds to deallocate network for instance. [ 1274.757887] env[63538]: DEBUG oslo_vmware.api [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1274.757887] env[63538]: value = "task-5101999" [ 1274.757887] env[63538]: _type = "Task" [ 1274.757887] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.764406] env[63538]: DEBUG oslo_vmware.api [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5101998, 'name': PowerOffVM_Task, 'duration_secs': 0.847604} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.765111] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1274.765323] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1274.765591] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f7f7a01-96a3-4587-a0be-e61e0fc40a5a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.771088] env[63538]: DEBUG oslo_vmware.api [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101999, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.836321] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1274.836668] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1274.836892] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleting the datastore file [datastore1] e93aab2e-f8c4-4959-923f-0449a84108d6 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1274.837486] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e549c16-1d60-40d6-bb00-ba11361c843d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.845190] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed2d7146-4e3f-4cde-a89c-938556c1e8e7 tempest-ServerActionsTestJSON-1096356934 tempest-ServerActionsTestJSON-1096356934-project-member] Lock "057f192d-b470-4683-b197-913457d10717" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.186s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.850210] env[63538]: DEBUG oslo_vmware.api [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1274.850210] env[63538]: value = "task-5102001" [ 1274.850210] env[63538]: _type = "Task" [ 1274.850210] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.859265] env[63538]: DEBUG oslo_vmware.api [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5102001, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.069314] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520ed035-97e3-17e2-ae26-66d66eec1ec3, 'name': SearchDatastore_Task, 'duration_secs': 0.01266} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.069627] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1275.069874] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1275.070126] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1275.070285] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.070472] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1275.070736] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8266b95d-c0b0-4e7d-bccf-cf4cc8ed3530 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.080638] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1275.080826] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1275.081603] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a240b342-a380-497b-87f5-5e3406d2a2b0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.087705] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1275.087705] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fba2a6-7b5f-aee3-4be5-26d2fd8afe97" [ 1275.087705] env[63538]: _type = "Task" [ 1275.087705] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.097072] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fba2a6-7b5f-aee3-4be5-26d2fd8afe97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.198866] env[63538]: DEBUG oslo_concurrency.lockutils [req-f073007f-6065-4435-8a5f-ae436cf797b4 req-0be848a5-994b-4d70-b7ec-b961faabd778 service nova] Releasing lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1275.269486] env[63538]: DEBUG oslo_vmware.api [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5101999, 'name': ReconfigVM_Task, 'duration_secs': 0.159036} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.269930] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992551', 'volume_id': 'f5c40b11-75f1-482f-aa80-8e412c228ea5', 'name': 'volume-f5c40b11-75f1-482f-aa80-8e412c228ea5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e3feec17-ca1b-4873-bb0a-370c3868aabf', 'attached_at': '', 'detached_at': '', 'volume_id': 'f5c40b11-75f1-482f-aa80-8e412c228ea5', 'serial': 'f5c40b11-75f1-482f-aa80-8e412c228ea5'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1275.301131] env[63538]: INFO nova.compute.manager [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Took 0.55 seconds to detach 1 volumes for instance. 
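Between task-5101996 and task-5101999 the log walks through a vmdk volume detach: the backing virtual disk is located via the repeated PropertyCollector.RetrievePropertiesEx calls, a ReconfigVM_Task with a device-removal spec detaches the disk, and the detached connection info is reported once the task completes. A condensed sketch of that sequence against an oslo.vmware session; the spec construction is simplified and this is not Nova's exact implementation:

    def detach_vmdk_device(session, vm_ref, device):
        # Build a reconfigure spec that removes the volume's virtual disk,
        # then let ReconfigVM_Task apply it (the task the log polls until
        # 'completed successfully').
        client_factory = session.vim.client.factory
        device_change = client_factory.create('ns0:VirtualDeviceConfigSpec')
        device_change.operation = 'remove'
        device_change.device = device
        spec = client_factory.create('ns0:VirtualMachineConfigSpec')
        spec.deviceChange = [device_change]
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=spec)
        # wait_for_task polls the task, mirroring the _poll_task entries above.
        session.wait_for_task(task)
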
[ 1275.305094] env[63538]: DEBUG nova.compute.manager [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Deleting volume: f4f9e20f-58de-40de-b281-a78c4a4444b2 {{(pid=63538) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1275.361994] env[63538]: DEBUG oslo_vmware.api [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5102001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145605} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.362334] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1275.362678] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1275.362785] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1275.363248] env[63538]: INFO nova.compute.manager [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1275.363248] env[63538]: DEBUG oslo.service.loopingcall [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1275.363502] env[63538]: DEBUG nova.compute.manager [-] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1275.363548] env[63538]: DEBUG nova.network.neutron [-] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1275.602018] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fba2a6-7b5f-aee3-4be5-26d2fd8afe97, 'name': SearchDatastore_Task, 'duration_secs': 0.012065} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.602018] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62cabb95-78a5-4fdd-9818-255e1743e62b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.607643] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1275.607643] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52313374-5e7e-18a1-b0e4-b58aa800c7b7" [ 1275.607643] env[63538]: _type = "Task" [ 1275.607643] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.621503] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52313374-5e7e-18a1-b0e4-b58aa800c7b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.789384] env[63538]: DEBUG nova.compute.manager [req-a8940e26-b414-4387-abab-6d8d75578bba req-0314e673-bc45-4633-a09a-013190b93948 service nova] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Received event network-vif-deleted-ec3a3da3-8713-4358-bd22-4e497a90c904 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1275.789619] env[63538]: DEBUG nova.compute.manager [req-a8940e26-b414-4387-abab-6d8d75578bba req-0314e673-bc45-4633-a09a-013190b93948 service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Received event network-vif-deleted-056940fe-0d12-4a3b-a2be-582e970e06bf {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1275.789790] env[63538]: INFO nova.compute.manager [req-a8940e26-b414-4387-abab-6d8d75578bba req-0314e673-bc45-4633-a09a-013190b93948 service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Neutron deleted interface 056940fe-0d12-4a3b-a2be-582e970e06bf; detaching it from the instance and deleting it from the info cache [ 1275.789965] env[63538]: DEBUG nova.network.neutron [req-a8940e26-b414-4387-abab-6d8d75578bba req-0314e673-bc45-4633-a09a-013190b93948 service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.828778] env[63538]: DEBUG nova.objects.instance [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lazy-loading 'flavor' on Instance uuid e3feec17-ca1b-4873-bb0a-370c3868aabf {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1275.844012] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.844304] env[63538]: DEBUG 
oslo_concurrency.lockutils [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.844527] env[63538]: DEBUG nova.objects.instance [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lazy-loading 'resources' on Instance uuid afa669ca-26b3-4b9d-ac9d-abbc966d5798 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1276.119075] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52313374-5e7e-18a1-b0e4-b58aa800c7b7, 'name': SearchDatastore_Task, 'duration_secs': 0.010535} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.119500] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.119843] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] d6215939-5e06-425d-b947-224eebb8386b/d6215939-5e06-425d-b947-224eebb8386b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1276.120198] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f29ff53-639f-40b1-bc36-9d15cea9d832 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.129120] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1276.129120] env[63538]: value = "task-5102003" [ 1276.129120] env[63538]: _type = "Task" [ 1276.129120] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.133563] env[63538]: DEBUG nova.network.neutron [-] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.143937] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102003, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.292359] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1260710d-9a6a-4b78-b184-395fba912aa4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.304383] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0226915-9b60-4daa-ab6a-a7499008a58b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.338394] env[63538]: DEBUG nova.compute.manager [req-a8940e26-b414-4387-abab-6d8d75578bba req-0314e673-bc45-4633-a09a-013190b93948 service nova] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Detach interface failed, port_id=056940fe-0d12-4a3b-a2be-582e970e06bf, reason: Instance e93aab2e-f8c4-4959-923f-0449a84108d6 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1276.364370] env[63538]: DEBUG nova.scheduler.client.report [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Refreshing inventories for resource provider f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1276.380176] env[63538]: DEBUG nova.scheduler.client.report [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Updating ProviderTree inventory for provider f65218a4-1d3d-476a-9093-01cae92c8635 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1276.380474] env[63538]: DEBUG nova.compute.provider_tree [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Updating inventory in ProviderTree for provider f65218a4-1d3d-476a-9093-01cae92c8635 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1276.393306] env[63538]: DEBUG nova.scheduler.client.report [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Refreshing aggregate associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, aggregates: None {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1276.414856] env[63538]: DEBUG nova.scheduler.client.report [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 
tempest-ServerActionsTestOtherA-1545658458-project-member] Refreshing trait associations for resource provider f65218a4-1d3d-476a-9093-01cae92c8635, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63538) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1276.497068] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e03b137-fa51-4442-b56c-893aad0312e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.509666] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027bc5e0-8773-41d6-9a63-394156bf23bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.543528] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c1aa97-7014-41cf-8391-db35988148fa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.552127] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19f5404-4617-4b15-bee9-31a3012c8ca1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.567647] env[63538]: DEBUG nova.compute.provider_tree [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.637799] env[63538]: INFO nova.compute.manager [-] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Took 1.27 seconds to deallocate network for instance. [ 1276.643311] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102003, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498716} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.646297] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] d6215939-5e06-425d-b947-224eebb8386b/d6215939-5e06-425d-b947-224eebb8386b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1276.646526] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1276.647204] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e86608bb-1e00-4028-8a22-151ede55be17 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.654845] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1276.654845] env[63538]: value = "task-5102004" [ 1276.654845] env[63538]: _type = "Task" [ 1276.654845] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.666758] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102004, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.841913] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2f1bb313-44f5-47c6-97d2-4fab77d6f46d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.284s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.073245] env[63538]: DEBUG nova.scheduler.client.report [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1277.148940] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.166758] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102004, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069001} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.166982] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1277.167895] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff7d839-1f6f-44c6-8cf6-8a8582267bb5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.190772] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] d6215939-5e06-425d-b947-224eebb8386b/d6215939-5e06-425d-b947-224eebb8386b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1277.191109] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0af6acab-8101-410f-bc5c-1d37380248f0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.211606] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1277.211606] env[63538]: value = "task-5102005" [ 1277.211606] env[63538]: _type = "Task" [ 1277.211606] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.221882] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102005, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.578344] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.734s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.580677] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.432s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.580918] env[63538]: DEBUG nova.objects.instance [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lazy-loading 'resources' on Instance uuid e93aab2e-f8c4-4959-923f-0449a84108d6 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1277.600561] env[63538]: INFO nova.scheduler.client.report [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleted allocations for instance afa669ca-26b3-4b9d-ac9d-abbc966d5798 [ 1277.722037] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102005, 'name': ReconfigVM_Task, 'duration_secs': 0.308671} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.722037] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Reconfigured VM instance instance-0000007a to attach disk [datastore2] d6215939-5e06-425d-b947-224eebb8386b/d6215939-5e06-425d-b947-224eebb8386b.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1277.722712] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa228552-2158-4c35-9c49-cbab9bd828e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.730314] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1277.730314] env[63538]: value = "task-5102006" [ 1277.730314] env[63538]: _type = "Task" [ 1277.730314] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.738735] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102006, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.935778] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.936069] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.936300] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "e3feec17-ca1b-4873-bb0a-370c3868aabf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.936495] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.936677] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.939323] env[63538]: INFO nova.compute.manager [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Terminating instance [ 1277.941286] env[63538]: DEBUG nova.compute.manager [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1277.941489] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1277.942316] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6097f34d-041d-4121-a26a-26618384b92c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.950110] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1277.950348] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f637a7c8-3a8d-4b04-a1ff-e76ff9df2f59 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.956370] env[63538]: DEBUG oslo_vmware.api [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1277.956370] env[63538]: value = "task-5102007" [ 1277.956370] env[63538]: _type = "Task" [ 1277.956370] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.964289] env[63538]: DEBUG oslo_vmware.api [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102007, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.107790] env[63538]: DEBUG oslo_concurrency.lockutils [None req-5fa735fe-3411-4dbf-a209-fbfa03c5f65a tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.045s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.108897] env[63538]: DEBUG oslo_concurrency.lockutils [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 5.702s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.109066] env[63538]: DEBUG oslo_concurrency.lockutils [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.109283] env[63538]: DEBUG oslo_concurrency.lockutils [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.109480] env[63538]: DEBUG oslo_concurrency.lockutils [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.111211] env[63538]: INFO nova.compute.manager [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Terminating instance [ 1278.113435] env[63538]: DEBUG nova.compute.manager [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1278.113735] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e11f16c-de7b-4275-b937-6500eef32b6d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.128157] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4c9f6b-a19d-4091-8775-687036f7223d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.164517] env[63538]: WARNING nova.virt.vmwareapi.driver [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance afa669ca-26b3-4b9d-ac9d-abbc966d5798 could not be found. [ 1278.164794] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1278.165121] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-500502cf-fddd-44f2-81b4-477a35285fab {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.174399] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8b0439-2d32-4ee2-b57c-32787f9f5f52 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.187944] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c521544-e9d4-49fa-b2b8-e053fd0b527c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.199233] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156cf10f-9254-4887-a47e-e1a27d102670 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.210282] env[63538]: WARNING nova.virt.vmwareapi.vmops [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance afa669ca-26b3-4b9d-ac9d-abbc966d5798 could not be found. [ 1278.210495] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1278.210682] env[63538]: INFO nova.compute.manager [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Took 0.10 seconds to destroy the instance on the hypervisor. 
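The "Waiting for function ..._deallocate_network_with_retries to return" entries (loopingcall.py:435) above and below refer to oslo.service's looping-call machinery. The exact looping-call variant Nova uses is not visible in the log, so the sketch below uses FixedIntervalLoopingCall purely as an example, with a hypothetical stand-in function:

```python
# Illustrative sketch of the oslo.service looping-call pattern, assuming a
# retryable step that signals completion via LoopingCallDone. Not Nova's code.
from oslo_service import loopingcall


def _deallocate_with_retries():
    # Perform one attempt; raise LoopingCallDone to stop the loop when done.
    raise loopingcall.LoopingCallDone(retvalue=True)


timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
# start() returns an event-like handle; wait() blocks until LoopingCallDone
# is raised and yields its retvalue.
result = timer.start(interval=1.0).wait()
```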
[ 1278.210934] env[63538]: DEBUG oslo.service.loopingcall [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1278.211553] env[63538]: DEBUG nova.compute.manager [-] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1278.211659] env[63538]: DEBUG nova.network.neutron [-] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1278.242615] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ec6a5d-c828-4f08-86f6-61d85301d1d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.253435] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102006, 'name': Rename_Task, 'duration_secs': 0.166283} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.254787] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31aa9e53-d97f-44d2-81f9-a3463abb2e7f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.258715] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1278.259505] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2410705e-efe7-4ac5-b0bb-91108be0ea6b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.272262] env[63538]: DEBUG nova.compute.provider_tree [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.274850] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1278.274850] env[63538]: value = "task-5102008" [ 1278.274850] env[63538]: _type = "Task" [ 1278.274850] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.284415] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102008, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.469180] env[63538]: DEBUG oslo_vmware.api [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102007, 'name': PowerOffVM_Task, 'duration_secs': 0.230727} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.469446] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1278.469626] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1278.469944] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af4cb128-1944-4343-b0a7-17ee4de5a3d3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.533541] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1278.533826] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1278.534141] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Deleting the datastore file [datastore2] e3feec17-ca1b-4873-bb0a-370c3868aabf {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1278.534497] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a488f13-a98a-4e79-84ec-a1a38569176c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.542321] env[63538]: DEBUG oslo_vmware.api [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1278.542321] env[63538]: value = "task-5102010" [ 
1278.542321] env[63538]: _type = "Task" [ 1278.542321] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.550278] env[63538]: DEBUG oslo_vmware.api [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102010, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.745989] env[63538]: DEBUG nova.network.neutron [-] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.776823] env[63538]: DEBUG nova.scheduler.client.report [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1278.789239] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102008, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.054739] env[63538]: DEBUG oslo_vmware.api [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150364} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.054952] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1279.055162] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1279.055352] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1279.055539] env[63538]: INFO nova.compute.manager [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1279.055785] env[63538]: DEBUG oslo.service.loopingcall [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1279.055986] env[63538]: DEBUG nova.compute.manager [-] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1279.056092] env[63538]: DEBUG nova.network.neutron [-] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1279.249282] env[63538]: INFO nova.compute.manager [-] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Took 1.04 seconds to deallocate network for instance. [ 1279.264193] env[63538]: WARNING nova.volume.cinder [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Attachment 6c566560-055e-4c55-9009-c09b3f55f604 does not exist. Ignoring.: cinderclient.exceptions.NotFound: Volume attachment could not be found with filter: attachment_id = 6c566560-055e-4c55-9009-c09b3f55f604. (HTTP 404) (Request-ID: req-4e87a060-41d2-48e1-9e3f-955d80ca933c) [ 1279.264504] env[63538]: INFO nova.compute.manager [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Took 0.01 seconds to detach 1 volumes for instance. 
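The task-5102008 PowerOnVM_Task entries above and below (progress 0% -> 66% -> 89% -> completed) show oslo.vmware's invoke-then-wait pattern: a vCenter task is started through the API session and then polled by wait_for_task. A minimal sketch under the assumption that a session and VM reference already exist; this is not Nova's actual power-on helper:

```python
# Illustrative only: "session" is an oslo_vmware.api.VMwareAPISession and
# "vm_ref" a VirtualMachine managed-object reference; both are assumed.
def power_on(session, vm_ref):
    # invoke_api issues the SOAP call and returns a Task managed-object ref.
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # wait_for_task polls the task (the "progress is N%" DEBUG lines) and
    # raises on a vCenter fault; on success it returns the completed task info.
    return session.wait_for_task(task)
```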
[ 1279.266760] env[63538]: DEBUG nova.compute.manager [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Deleting volume: f4f9e20f-58de-40de-b281-a78c4a4444b2 {{(pid=63538) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1279.285934] env[63538]: WARNING nova.compute.manager [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Failed to delete volume: f4f9e20f-58de-40de-b281-a78c4a4444b2 due to Volume f4f9e20f-58de-40de-b281-a78c4a4444b2 could not be found.: nova.exception.VolumeNotFound: Volume f4f9e20f-58de-40de-b281-a78c4a4444b2 could not be found. [ 1279.289862] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.709s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.297847] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102008, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.321501] env[63538]: INFO nova.scheduler.client.report [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleted allocations for instance e93aab2e-f8c4-4959-923f-0449a84108d6 [ 1279.528805] env[63538]: DEBUG nova.compute.manager [req-b1a444cf-f52b-463b-9fa7-35f15f5ff3d2 req-cc6a6f94-4956-4942-b4f8-409ee0460822 service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Received event network-vif-deleted-c42aed5e-d684-4b97-aade-4acca4902f3d {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1279.528805] env[63538]: INFO nova.compute.manager [req-b1a444cf-f52b-463b-9fa7-35f15f5ff3d2 req-cc6a6f94-4956-4942-b4f8-409ee0460822 service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Neutron deleted interface c42aed5e-d684-4b97-aade-4acca4902f3d; detaching it from the instance and deleting it from the info cache [ 1279.528805] env[63538]: DEBUG nova.network.neutron [req-b1a444cf-f52b-463b-9fa7-35f15f5ff3d2 req-cc6a6f94-4956-4942-b4f8-409ee0460822 service nova] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.790330] env[63538]: DEBUG oslo_vmware.api [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102008, 'name': PowerOnVM_Task, 'duration_secs': 1.163883} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.790723] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1279.790824] env[63538]: INFO nova.compute.manager [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1279.791009] env[63538]: DEBUG nova.compute.manager [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1279.791805] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2116de46-ff1c-4288-9f8f-a50f634953d2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.794682] env[63538]: INFO nova.compute.manager [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Instance disappeared during terminate [ 1279.794889] env[63538]: DEBUG oslo_concurrency.lockutils [None req-401cf380-6f0f-4d8a-b090-1a25c761e6cd tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "afa669ca-26b3-4b9d-ac9d-abbc966d5798" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 1.686s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.830763] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b8b1d557-05e2-487e-892f-c26d276d4112 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "e93aab2e-f8c4-4959-923f-0449a84108d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.117s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.004692] env[63538]: DEBUG nova.network.neutron [-] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.031955] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-66d4fb92-5649-4927-9bad-cb3b403acd73 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.043210] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6bb8f09-27f6-4588-95f1-c71342f7ad9e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.073916] env[63538]: DEBUG nova.compute.manager [req-b1a444cf-f52b-463b-9fa7-35f15f5ff3d2 req-cc6a6f94-4956-4942-b4f8-409ee0460822 service nova] 
[instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Detach interface failed, port_id=c42aed5e-d684-4b97-aade-4acca4902f3d, reason: Instance e3feec17-ca1b-4873-bb0a-370c3868aabf could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1280.313023] env[63538]: INFO nova.compute.manager [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Took 13.25 seconds to build instance. [ 1280.507215] env[63538]: INFO nova.compute.manager [-] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Took 1.45 seconds to deallocate network for instance. [ 1280.583969] env[63538]: DEBUG nova.compute.manager [req-f935460e-c39b-4edc-ac73-9c1d96c7989a req-5c8120ec-3edd-4c8f-bd7f-daf21cf28bae service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Received event network-changed-5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1280.584077] env[63538]: DEBUG nova.compute.manager [req-f935460e-c39b-4edc-ac73-9c1d96c7989a req-5c8120ec-3edd-4c8f-bd7f-daf21cf28bae service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Refreshing instance network info cache due to event network-changed-5f276ada-dc8e-4558-a169-bfcaf25172e8. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1280.584275] env[63538]: DEBUG oslo_concurrency.lockutils [req-f935460e-c39b-4edc-ac73-9c1d96c7989a req-5c8120ec-3edd-4c8f-bd7f-daf21cf28bae service nova] Acquiring lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.584362] env[63538]: DEBUG oslo_concurrency.lockutils [req-f935460e-c39b-4edc-ac73-9c1d96c7989a req-5c8120ec-3edd-4c8f-bd7f-daf21cf28bae service nova] Acquired lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.584530] env[63538]: DEBUG nova.network.neutron [req-f935460e-c39b-4edc-ac73-9c1d96c7989a req-5c8120ec-3edd-4c8f-bd7f-daf21cf28bae service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Refreshing network info cache for port 5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1280.815323] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b2f75a69-2d30-4841-9a92-058c18d74058 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "d6215939-5e06-425d-b947-224eebb8386b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.767s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1281.015176] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.015665] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed575514-44ac-484d-b637-d291d1d74831 
tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.015933] env[63538]: DEBUG nova.objects.instance [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lazy-loading 'resources' on Instance uuid e3feec17-ca1b-4873-bb0a-370c3868aabf {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1281.318705] env[63538]: DEBUG nova.network.neutron [req-f935460e-c39b-4edc-ac73-9c1d96c7989a req-5c8120ec-3edd-4c8f-bd7f-daf21cf28bae service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updated VIF entry in instance network info cache for port 5f276ada-dc8e-4558-a169-bfcaf25172e8. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1281.319203] env[63538]: DEBUG nova.network.neutron [req-f935460e-c39b-4edc-ac73-9c1d96c7989a req-5c8120ec-3edd-4c8f-bd7f-daf21cf28bae service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updating instance_info_cache with network_info: [{"id": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "address": "fa:16:3e:59:b6:e8", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f276ada-dc", "ovs_interfaceid": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.574924] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bede8d-3b1b-4839-9ef3-6fcdc79d5f86 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.583389] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b05230a-8c22-43b9-a98f-57ce767c8b65 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.616632] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca406fa-651c-41f0-8411-9057611970df {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.626204] env[63538]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac49a83-22f1-42b0-a516-47b9936a9d1f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.641743] env[63538]: DEBUG nova.compute.provider_tree [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1281.822643] env[63538]: DEBUG oslo_concurrency.lockutils [req-f935460e-c39b-4edc-ac73-9c1d96c7989a req-5c8120ec-3edd-4c8f-bd7f-daf21cf28bae service nova] Releasing lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1282.145053] env[63538]: DEBUG nova.scheduler.client.report [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1282.650166] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.634s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.676821] env[63538]: INFO nova.scheduler.client.report [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Deleted allocations for instance e3feec17-ca1b-4873-bb0a-370c3868aabf [ 1283.185140] env[63538]: DEBUG oslo_concurrency.lockutils [None req-ed575514-44ac-484d-b637-d291d1d74831 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "e3feec17-ca1b-4873-bb0a-370c3868aabf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.249s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.562725] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "fb26fb32-a420-4667-850c-e32786edd8f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.563026] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock 
"fb26fb32-a420-4667-850c-e32786edd8f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.563261] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "fb26fb32-a420-4667-850c-e32786edd8f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.563455] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "fb26fb32-a420-4667-850c-e32786edd8f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.563634] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "fb26fb32-a420-4667-850c-e32786edd8f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.566458] env[63538]: INFO nova.compute.manager [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Terminating instance [ 1283.569935] env[63538]: DEBUG nova.compute.manager [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1283.570048] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1283.571094] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef750c4-e0d9-4da0-b369-4b5fe9a6ae35 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.579389] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1283.579646] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af4e7bf7-7834-4cfe-9124-92ed0c2b29ff {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.586504] env[63538]: DEBUG oslo_vmware.api [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1283.586504] env[63538]: value = "task-5102012" [ 1283.586504] env[63538]: _type = "Task" [ 1283.586504] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.595346] env[63538]: DEBUG oslo_vmware.api [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5102012, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.097025] env[63538]: DEBUG oslo_vmware.api [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5102012, 'name': PowerOffVM_Task, 'duration_secs': 0.243096} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.097025] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1284.097218] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1284.097487] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eadec1f3-518c-42c5-8854-e7763bf3ab7a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.165904] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1284.166395] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1284.166552] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleting the datastore file [datastore2] fb26fb32-a420-4667-850c-e32786edd8f2 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1284.166747] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9f3c336-3a9e-4421-afb7-d5188b04f489 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.174342] env[63538]: DEBUG oslo_vmware.api [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for the task: (returnval){ [ 1284.174342] env[63538]: value = "task-5102014" [ 1284.174342] env[63538]: _type = "Task" [ 1284.174342] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.183508] env[63538]: DEBUG oslo_vmware.api [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5102014, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.684998] env[63538]: DEBUG oslo_vmware.api [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Task: {'id': task-5102014, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2949} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.685433] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1284.685485] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1284.685727] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1284.685923] env[63538]: INFO nova.compute.manager [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1284.686197] env[63538]: DEBUG oslo.service.loopingcall [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1284.686404] env[63538]: DEBUG nova.compute.manager [-] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1284.686502] env[63538]: DEBUG nova.network.neutron [-] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1284.944644] env[63538]: DEBUG nova.compute.manager [req-76581efc-cf15-40b0-a5ad-608181ed2787 req-855343d4-78cc-4cdb-9ed0-435e58989e5e service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Received event network-vif-deleted-5250918c-5112-49ad-b1d3-f73c2d534637 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1284.944821] env[63538]: INFO nova.compute.manager [req-76581efc-cf15-40b0-a5ad-608181ed2787 req-855343d4-78cc-4cdb-9ed0-435e58989e5e service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Neutron deleted interface 5250918c-5112-49ad-b1d3-f73c2d534637; detaching it from the instance and deleting it from the info cache [ 1284.944992] env[63538]: DEBUG nova.network.neutron [req-76581efc-cf15-40b0-a5ad-608181ed2787 req-855343d4-78cc-4cdb-9ed0-435e58989e5e service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.052694] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1285.052966] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1285.421293] env[63538]: DEBUG nova.network.neutron [-] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.449131] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-394ac1fa-4ad8-4181-b11a-d8edfcfa62ac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.460155] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fa225e-a2af-47ee-b265-166d24b808e6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.488511] env[63538]: DEBUG nova.compute.manager [req-76581efc-cf15-40b0-a5ad-608181ed2787 req-855343d4-78cc-4cdb-9ed0-435e58989e5e service nova] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Detach interface failed, 
port_id=5250918c-5112-49ad-b1d3-f73c2d534637, reason: Instance fb26fb32-a420-4667-850c-e32786edd8f2 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1285.556026] env[63538]: DEBUG nova.compute.manager [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1285.924215] env[63538]: INFO nova.compute.manager [-] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Took 1.24 seconds to deallocate network for instance. [ 1286.077724] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.078018] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.079996] env[63538]: INFO nova.compute.claims [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1286.431464] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.140164] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad26a44-a586-44f0-8dd5-892216c1512b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.149663] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1d0dad-ea8b-4de2-beb1-47a90c56895b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.180837] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c2a520-4ed4-4d6b-8fc3-39be8a88e057 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.189138] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a119d4c8-be26-4234-91ba-2ae3199bfa23 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.203063] env[63538]: DEBUG nova.compute.provider_tree [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 
tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1287.706711] env[63538]: DEBUG nova.scheduler.client.report [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1288.211704] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.133s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.212291] env[63538]: DEBUG nova.compute.manager [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1288.215072] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.784s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.215306] env[63538]: DEBUG nova.objects.instance [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lazy-loading 'resources' on Instance uuid fb26fb32-a420-4667-850c-e32786edd8f2 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1288.718122] env[63538]: DEBUG nova.compute.utils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1288.722839] env[63538]: DEBUG nova.compute.manager [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1288.722839] env[63538]: DEBUG nova.network.neutron [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1288.774281] env[63538]: DEBUG nova.policy [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb444448a4d64c5e8ec9613ed633a527', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9b1eba931f144b94b6e186dac1310dfa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1288.777711] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f85c2d-d179-4fde-adc2-66a9abde8adb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.785529] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ab3358-7ce8-48c6-ad5a-77c6b727c9fc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.819410] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3e18d7-1691-40bf-b55e-62b860931014 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.827407] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60a202f-6a27-4ef5-a1d4-c03a618fefaf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.841141] env[63538]: DEBUG nova.compute.provider_tree [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1289.082936] env[63538]: DEBUG nova.network.neutron [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Successfully created port: 8e02ffa4-5076-4042-a6e3-4a7142802a93 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1289.223929] env[63538]: DEBUG nova.compute.manager [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1289.343624] env[63538]: DEBUG nova.scheduler.client.report [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1289.849089] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.634s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.872511] env[63538]: INFO nova.scheduler.client.report [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Deleted allocations for instance fb26fb32-a420-4667-850c-e32786edd8f2 [ 1290.234642] env[63538]: DEBUG nova.compute.manager [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1290.263582] env[63538]: DEBUG nova.virt.hardware [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1290.264303] env[63538]: DEBUG nova.virt.hardware [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1290.264303] env[63538]: DEBUG nova.virt.hardware [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1290.264303] env[63538]: DEBUG nova.virt.hardware [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1290.264484] env[63538]: DEBUG nova.virt.hardware [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1290.264602] env[63538]: DEBUG nova.virt.hardware [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1290.264827] env[63538]: DEBUG nova.virt.hardware [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1290.264997] env[63538]: DEBUG nova.virt.hardware [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1290.265193] 
env[63538]: DEBUG nova.virt.hardware [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1290.265371] env[63538]: DEBUG nova.virt.hardware [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1290.265561] env[63538]: DEBUG nova.virt.hardware [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1290.266538] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce950af-bc26-49c0-8785-f6b00d7bb98a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.275884] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7339c588-6674-4d26-aea0-7bf0ada2b21d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.380579] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a95e277b-651a-4183-bc02-c2ed0fdd3358 tempest-ServerActionsTestOtherA-1545658458 tempest-ServerActionsTestOtherA-1545658458-project-member] Lock "fb26fb32-a420-4667-850c-e32786edd8f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.817s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.514339] env[63538]: DEBUG nova.compute.manager [req-0aba5371-93b6-4c18-909c-a28c481d78f1 req-c0d81a6b-5638-4b53-acaa-28197d1eaa78 service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Received event network-vif-plugged-8e02ffa4-5076-4042-a6e3-4a7142802a93 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1290.514608] env[63538]: DEBUG oslo_concurrency.lockutils [req-0aba5371-93b6-4c18-909c-a28c481d78f1 req-c0d81a6b-5638-4b53-acaa-28197d1eaa78 service nova] Acquiring lock "579c71bd-24f0-4257-856c-b24ddb2b9dba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.514839] env[63538]: DEBUG oslo_concurrency.lockutils [req-0aba5371-93b6-4c18-909c-a28c481d78f1 req-c0d81a6b-5638-4b53-acaa-28197d1eaa78 service nova] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.515062] env[63538]: DEBUG oslo_concurrency.lockutils [req-0aba5371-93b6-4c18-909c-a28c481d78f1 req-c0d81a6b-5638-4b53-acaa-28197d1eaa78 service nova] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.515353] env[63538]: DEBUG nova.compute.manager [req-0aba5371-93b6-4c18-909c-a28c481d78f1 req-c0d81a6b-5638-4b53-acaa-28197d1eaa78 service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] No waiting events found dispatching network-vif-plugged-8e02ffa4-5076-4042-a6e3-4a7142802a93 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1290.515414] env[63538]: WARNING nova.compute.manager [req-0aba5371-93b6-4c18-909c-a28c481d78f1 req-c0d81a6b-5638-4b53-acaa-28197d1eaa78 service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Received unexpected event network-vif-plugged-8e02ffa4-5076-4042-a6e3-4a7142802a93 for instance with vm_state building and task_state spawning. [ 1290.621923] env[63538]: DEBUG nova.network.neutron [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Successfully updated port: 8e02ffa4-5076-4042-a6e3-4a7142802a93 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1291.125037] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "refresh_cache-579c71bd-24f0-4257-856c-b24ddb2b9dba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1291.125037] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired lock "refresh_cache-579c71bd-24f0-4257-856c-b24ddb2b9dba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.125306] env[63538]: DEBUG nova.network.neutron [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1291.673275] env[63538]: DEBUG nova.network.neutron [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1291.904094] env[63538]: DEBUG nova.network.neutron [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Updating instance_info_cache with network_info: [{"id": "8e02ffa4-5076-4042-a6e3-4a7142802a93", "address": "fa:16:3e:c0:22:74", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e02ffa4-50", "ovs_interfaceid": "8e02ffa4-5076-4042-a6e3-4a7142802a93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.406587] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Releasing lock "refresh_cache-579c71bd-24f0-4257-856c-b24ddb2b9dba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1292.406869] env[63538]: DEBUG nova.compute.manager [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Instance network_info: |[{"id": "8e02ffa4-5076-4042-a6e3-4a7142802a93", "address": "fa:16:3e:c0:22:74", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e02ffa4-50", "ovs_interfaceid": "8e02ffa4-5076-4042-a6e3-4a7142802a93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1292.407341] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:22:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e02ffa4-5076-4042-a6e3-4a7142802a93', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1292.415713] env[63538]: DEBUG oslo.service.loopingcall [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1292.415964] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1292.416220] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d02908de-e44b-4367-afa3-9868bdf1ed85 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.436995] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1292.436995] env[63538]: value = "task-5102015" [ 1292.436995] env[63538]: _type = "Task" [ 1292.436995] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.445236] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5102015, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.552583] env[63538]: DEBUG nova.compute.manager [req-d98e8fbd-d267-469b-9a58-2b7ff4c73eb2 req-ea255664-cdc3-4cd2-8b5d-9bd0e6d75ba1 service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Received event network-changed-8e02ffa4-5076-4042-a6e3-4a7142802a93 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1292.552820] env[63538]: DEBUG nova.compute.manager [req-d98e8fbd-d267-469b-9a58-2b7ff4c73eb2 req-ea255664-cdc3-4cd2-8b5d-9bd0e6d75ba1 service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Refreshing instance network info cache due to event network-changed-8e02ffa4-5076-4042-a6e3-4a7142802a93. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1292.553016] env[63538]: DEBUG oslo_concurrency.lockutils [req-d98e8fbd-d267-469b-9a58-2b7ff4c73eb2 req-ea255664-cdc3-4cd2-8b5d-9bd0e6d75ba1 service nova] Acquiring lock "refresh_cache-579c71bd-24f0-4257-856c-b24ddb2b9dba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1292.553199] env[63538]: DEBUG oslo_concurrency.lockutils [req-d98e8fbd-d267-469b-9a58-2b7ff4c73eb2 req-ea255664-cdc3-4cd2-8b5d-9bd0e6d75ba1 service nova] Acquired lock "refresh_cache-579c71bd-24f0-4257-856c-b24ddb2b9dba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.553362] env[63538]: DEBUG nova.network.neutron [req-d98e8fbd-d267-469b-9a58-2b7ff4c73eb2 req-ea255664-cdc3-4cd2-8b5d-9bd0e6d75ba1 service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Refreshing network info cache for port 8e02ffa4-5076-4042-a6e3-4a7142802a93 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1292.946484] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5102015, 'name': CreateVM_Task, 'duration_secs': 0.34502} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.946826] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1292.947392] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1292.947577] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.947948] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1292.948237] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14e4ea2a-dcd6-4817-948b-02fd10f23ac1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.953343] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1292.953343] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5267f926-5d33-a0cf-c4ed-c6b9b32bafd0" [ 1292.953343] env[63538]: _type = "Task" [ 1292.953343] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.962557] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5267f926-5d33-a0cf-c4ed-c6b9b32bafd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.334234] env[63538]: DEBUG nova.network.neutron [req-d98e8fbd-d267-469b-9a58-2b7ff4c73eb2 req-ea255664-cdc3-4cd2-8b5d-9bd0e6d75ba1 service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Updated VIF entry in instance network info cache for port 8e02ffa4-5076-4042-a6e3-4a7142802a93. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1293.334630] env[63538]: DEBUG nova.network.neutron [req-d98e8fbd-d267-469b-9a58-2b7ff4c73eb2 req-ea255664-cdc3-4cd2-8b5d-9bd0e6d75ba1 service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Updating instance_info_cache with network_info: [{"id": "8e02ffa4-5076-4042-a6e3-4a7142802a93", "address": "fa:16:3e:c0:22:74", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e02ffa4-50", "ovs_interfaceid": "8e02ffa4-5076-4042-a6e3-4a7142802a93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.466027] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5267f926-5d33-a0cf-c4ed-c6b9b32bafd0, 'name': SearchDatastore_Task, 'duration_secs': 0.011612} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.466027] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1293.466252] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1293.466413] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1293.466576] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.466764] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1293.467079] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af15a419-c091-44f4-b80b-97c875eaa9f4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.476760] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1293.476961] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1293.478045] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc6ab6f7-c71a-4fe4-ba5d-5cdd114007d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.485414] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1293.485414] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aa2b59-cb1f-47bb-966d-688cf1ddc8db" [ 1293.485414] env[63538]: _type = "Task" [ 1293.485414] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.493340] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aa2b59-cb1f-47bb-966d-688cf1ddc8db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.837898] env[63538]: DEBUG oslo_concurrency.lockutils [req-d98e8fbd-d267-469b-9a58-2b7ff4c73eb2 req-ea255664-cdc3-4cd2-8b5d-9bd0e6d75ba1 service nova] Releasing lock "refresh_cache-579c71bd-24f0-4257-856c-b24ddb2b9dba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.000776] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52aa2b59-cb1f-47bb-966d-688cf1ddc8db, 'name': SearchDatastore_Task, 'duration_secs': 0.010403} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.002185] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bf50b68-d322-469e-adbd-6ad81883b75d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.010640] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1294.010640] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a84ee2-4bfb-0fcb-927d-fd683e925898" [ 1294.010640] env[63538]: _type = "Task" [ 1294.010640] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.022536] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a84ee2-4bfb-0fcb-927d-fd683e925898, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.521575] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52a84ee2-4bfb-0fcb-927d-fd683e925898, 'name': SearchDatastore_Task, 'duration_secs': 0.010917} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.521872] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.522114] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 579c71bd-24f0-4257-856c-b24ddb2b9dba/579c71bd-24f0-4257-856c-b24ddb2b9dba.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1294.522385] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e700b07-32c2-4e93-a2b8-5e79fdfa1fbd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.530835] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1294.530835] env[63538]: value = "task-5102016" [ 1294.530835] env[63538]: _type = "Task" [ 1294.530835] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.539917] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102016, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.041748] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102016, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469537} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.042124] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 579c71bd-24f0-4257-856c-b24ddb2b9dba/579c71bd-24f0-4257-856c-b24ddb2b9dba.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1295.042260] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1295.042565] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75a14ca9-a547-4d29-98d9-e7435c78375f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.053696] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1295.053696] env[63538]: value = "task-5102017" [ 1295.053696] env[63538]: _type = "Task" [ 1295.053696] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.063812] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102017, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.562874] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102017, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072067} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.563168] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1295.563950] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfd271c-0b71-4440-ac01-080467c4cb74 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.587675] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 579c71bd-24f0-4257-856c-b24ddb2b9dba/579c71bd-24f0-4257-856c-b24ddb2b9dba.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1295.587675] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-645bc2a5-36a8-4b28-b9e3-e1b9028b6de1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.607248] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1295.607248] env[63538]: value = "task-5102018" [ 1295.607248] env[63538]: _type = "Task" [ 1295.607248] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.615223] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102018, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.117882] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102018, 'name': ReconfigVM_Task, 'duration_secs': 0.341463} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.117882] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 579c71bd-24f0-4257-856c-b24ddb2b9dba/579c71bd-24f0-4257-856c-b24ddb2b9dba.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1296.118563] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44e97822-1cde-4624-b9bd-132b4e5b6b95 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.126139] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1296.126139] env[63538]: value = "task-5102019" [ 1296.126139] env[63538]: _type = "Task" [ 1296.126139] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.135536] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102019, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.364184] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "2f7bc37b-36c6-404a-82a9-c2b0d4a72439" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.364416] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "2f7bc37b-36c6-404a-82a9-c2b0d4a72439" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.639036] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102019, 'name': Rename_Task, 'duration_secs': 0.15925} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.639036] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1296.639036] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-420f85bc-3c04-4ded-8467-ff61c2564395 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.645916] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1296.645916] env[63538]: value = "task-5102020" [ 1296.645916] env[63538]: _type = "Task" [ 1296.645916] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.655668] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102020, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.867692] env[63538]: DEBUG nova.compute.manager [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1297.157356] env[63538]: DEBUG oslo_vmware.api [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102020, 'name': PowerOnVM_Task, 'duration_secs': 0.468014} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.158020] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1297.158020] env[63538]: INFO nova.compute.manager [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Took 6.92 seconds to spawn the instance on the hypervisor. 
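The lines above trace a complete spawn of instance 579c71bd-24f0-4257-856c-b24ddb2b9dba from the cached image: the driver copies the cached VMDK into the instance directory (CopyVirtualDisk_Task), extends the root disk (ExtendVirtualDisk_Task), attaches it to the VM (ReconfigVM_Task), renames the VM (Rename_Task) and powers it on (PowerOnVM_Task), polling each vCenter task until it reports completion. Below is a minimal, self-contained Python sketch of that task-polling sequence. Every name in it (Task, submit_task, wait_for_task, spawn_from_cached_image) is a hypothetical stand-in invented for illustration; this is not the nova vmwareapi driver or the oslo.vmware API, and the durations are simply the duration_secs values reported in the log above.

# Illustrative sketch only: hypothetical helpers that mimic the task sequence
# recorded above (CopyVirtualDisk_Task -> ExtendVirtualDisk_Task ->
# ReconfigVM_Task -> Rename_Task -> PowerOnVM_Task). Not driver code.
import time
from dataclasses import dataclass, field


@dataclass
class Task:
    """A stand-in for a vCenter task handle as it appears in the log."""
    task_id: str
    name: str
    duration_secs: float
    _started: float = field(default_factory=time.monotonic)

    def progress(self) -> int:
        # Fake linear progress based on elapsed wall-clock time.
        elapsed = time.monotonic() - self._started
        return min(100, int(100 * elapsed / self.duration_secs))

    def done(self) -> bool:
        return self.progress() >= 100


def submit_task(task_id: str, name: str, duration_secs: float) -> Task:
    # In the real driver this would be a vCenter call made through the
    # oslo.vmware session; here we just fabricate a Task object.
    return Task(task_id, name, duration_secs)


def wait_for_task(task: Task, poll_interval: float = 0.1) -> None:
    # Mirrors the "progress is N%" / "completed successfully" pattern
    # that oslo_vmware.api logs while polling a task.
    while not task.done():
        print(f"Task {task.task_id} ({task.name}) progress is {task.progress()}%")
        time.sleep(poll_interval)
    print(f"Task {task.task_id} ({task.name}) completed successfully "
          f"in {task.duration_secs:.3f}s")


def spawn_from_cached_image(instance_uuid: str) -> None:
    # Same ordering of vCenter tasks the log records for this instance:
    # copy the cached VMDK, grow the root disk, attach it, rename, power on.
    steps = [
        ("task-5102016", "CopyVirtualDisk_Task", 0.47),
        ("task-5102017", "ExtendVirtualDisk_Task", 0.07),
        ("task-5102018", "ReconfigVM_Task", 0.34),
        ("task-5102019", "Rename_Task", 0.16),
        ("task-5102020", "PowerOnVM_Task", 0.47),
    ]
    for task_id, name, duration in steps:
        wait_for_task(submit_task(task_id, name, duration))
    print(f"Instance {instance_uuid} spawned on the hypervisor")


if __name__ == "__main__":
    spawn_from_cached_image("579c71bd-24f0-4257-856c-b24ddb2b9dba")

Running the sketch prints one or more progress lines followed by a completion line per task, in the same order the log records them; in the real driver the poll interval is governed by the oslo.vmware session configuration rather than a fixed sleep.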
[ 1297.158195] env[63538]: DEBUG nova.compute.manager [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1297.159360] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f23d9d-f0f2-41d9-a891-bca2d81bdd60 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.390271] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.390566] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.392146] env[63538]: INFO nova.compute.claims [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1297.679075] env[63538]: INFO nova.compute.manager [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Took 11.62 seconds to build instance. [ 1298.181166] env[63538]: DEBUG oslo_concurrency.lockutils [None req-3bdf62d6-6d40-4a41-a8ff-72d0cfcc6510 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.128s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.332616] env[63538]: DEBUG nova.compute.manager [req-436b64c2-76f1-4667-9ad7-aed488c3575f req-e879f254-ff6d-4925-9548-4e496f855820 service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Received event network-changed-8e02ffa4-5076-4042-a6e3-4a7142802a93 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1298.332616] env[63538]: DEBUG nova.compute.manager [req-436b64c2-76f1-4667-9ad7-aed488c3575f req-e879f254-ff6d-4925-9548-4e496f855820 service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Refreshing instance network info cache due to event network-changed-8e02ffa4-5076-4042-a6e3-4a7142802a93. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1298.332616] env[63538]: DEBUG oslo_concurrency.lockutils [req-436b64c2-76f1-4667-9ad7-aed488c3575f req-e879f254-ff6d-4925-9548-4e496f855820 service nova] Acquiring lock "refresh_cache-579c71bd-24f0-4257-856c-b24ddb2b9dba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1298.332969] env[63538]: DEBUG oslo_concurrency.lockutils [req-436b64c2-76f1-4667-9ad7-aed488c3575f req-e879f254-ff6d-4925-9548-4e496f855820 service nova] Acquired lock "refresh_cache-579c71bd-24f0-4257-856c-b24ddb2b9dba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.332969] env[63538]: DEBUG nova.network.neutron [req-436b64c2-76f1-4667-9ad7-aed488c3575f req-e879f254-ff6d-4925-9548-4e496f855820 service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Refreshing network info cache for port 8e02ffa4-5076-4042-a6e3-4a7142802a93 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1298.457518] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01738489-9a63-431a-8684-d583ce773a89 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.465947] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d9e7bd-71a2-42e9-8ad6-548153d3891f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.500076] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da27554-ae12-42da-969b-56ac11767687 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.508326] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f498e571-5bbd-4cd0-a3b0-302ac450445d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.523499] env[63538]: DEBUG nova.compute.provider_tree [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1299.026318] env[63538]: DEBUG nova.scheduler.client.report [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1299.054684] env[63538]: DEBUG nova.network.neutron [req-436b64c2-76f1-4667-9ad7-aed488c3575f req-e879f254-ff6d-4925-9548-4e496f855820 service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Updated VIF entry in instance 
network info cache for port 8e02ffa4-5076-4042-a6e3-4a7142802a93. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1299.055091] env[63538]: DEBUG nova.network.neutron [req-436b64c2-76f1-4667-9ad7-aed488c3575f req-e879f254-ff6d-4925-9548-4e496f855820 service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Updating instance_info_cache with network_info: [{"id": "8e02ffa4-5076-4042-a6e3-4a7142802a93", "address": "fa:16:3e:c0:22:74", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e02ffa4-50", "ovs_interfaceid": "8e02ffa4-5076-4042-a6e3-4a7142802a93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.532018] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.141s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.532569] env[63538]: DEBUG nova.compute.manager [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1299.558292] env[63538]: DEBUG oslo_concurrency.lockutils [req-436b64c2-76f1-4667-9ad7-aed488c3575f req-e879f254-ff6d-4925-9548-4e496f855820 service nova] Releasing lock "refresh_cache-579c71bd-24f0-4257-856c-b24ddb2b9dba" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1300.037427] env[63538]: DEBUG nova.compute.utils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1300.039383] env[63538]: DEBUG nova.compute.manager [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Allocating IP information in the background. 
{{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1300.039591] env[63538]: DEBUG nova.network.neutron [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1300.085666] env[63538]: DEBUG nova.policy [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '534d33f66f17494c835f4e9d21b48909', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '329e471d86b9451b86b0b28b2824eae5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1300.351537] env[63538]: DEBUG nova.network.neutron [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Successfully created port: eb5819a0-9549-4a1a-af36-c661faf9b44f {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1300.543123] env[63538]: DEBUG nova.compute.manager [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1301.553864] env[63538]: DEBUG nova.compute.manager [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1301.580022] env[63538]: DEBUG nova.virt.hardware [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1301.580358] env[63538]: DEBUG nova.virt.hardware [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1301.580535] env[63538]: DEBUG nova.virt.hardware [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1301.580728] env[63538]: DEBUG nova.virt.hardware [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1301.580884] env[63538]: DEBUG nova.virt.hardware [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1301.581052] env[63538]: DEBUG nova.virt.hardware [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1301.581274] env[63538]: DEBUG nova.virt.hardware [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1301.581441] env[63538]: DEBUG nova.virt.hardware [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1301.581617] env[63538]: DEBUG nova.virt.hardware [None 
req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1301.581788] env[63538]: DEBUG nova.virt.hardware [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1301.581970] env[63538]: DEBUG nova.virt.hardware [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1301.582854] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad17662b-b636-42c2-9fb5-52aef5028327 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.591541] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745962c1-0a08-4739-b8e6-9252f441bd5f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.749220] env[63538]: DEBUG nova.compute.manager [req-340a3f9d-dced-468e-bf60-20a3b19174b8 req-f96de8a9-a69a-4f75-85d4-37050ed5c5cb service nova] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Received event network-vif-plugged-eb5819a0-9549-4a1a-af36-c661faf9b44f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1301.749563] env[63538]: DEBUG oslo_concurrency.lockutils [req-340a3f9d-dced-468e-bf60-20a3b19174b8 req-f96de8a9-a69a-4f75-85d4-37050ed5c5cb service nova] Acquiring lock "2f7bc37b-36c6-404a-82a9-c2b0d4a72439-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.749860] env[63538]: DEBUG oslo_concurrency.lockutils [req-340a3f9d-dced-468e-bf60-20a3b19174b8 req-f96de8a9-a69a-4f75-85d4-37050ed5c5cb service nova] Lock "2f7bc37b-36c6-404a-82a9-c2b0d4a72439-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.749860] env[63538]: DEBUG oslo_concurrency.lockutils [req-340a3f9d-dced-468e-bf60-20a3b19174b8 req-f96de8a9-a69a-4f75-85d4-37050ed5c5cb service nova] Lock "2f7bc37b-36c6-404a-82a9-c2b0d4a72439-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1301.750013] env[63538]: DEBUG nova.compute.manager [req-340a3f9d-dced-468e-bf60-20a3b19174b8 req-f96de8a9-a69a-4f75-85d4-37050ed5c5cb service nova] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] No waiting events found dispatching network-vif-plugged-eb5819a0-9549-4a1a-af36-c661faf9b44f {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1301.750438] env[63538]: WARNING nova.compute.manager [req-340a3f9d-dced-468e-bf60-20a3b19174b8 
req-f96de8a9-a69a-4f75-85d4-37050ed5c5cb service nova] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Received unexpected event network-vif-plugged-eb5819a0-9549-4a1a-af36-c661faf9b44f for instance with vm_state building and task_state spawning. [ 1301.838172] env[63538]: DEBUG nova.network.neutron [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Successfully updated port: eb5819a0-9549-4a1a-af36-c661faf9b44f {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1302.341730] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "refresh_cache-2f7bc37b-36c6-404a-82a9-c2b0d4a72439" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1302.341796] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquired lock "refresh_cache-2f7bc37b-36c6-404a-82a9-c2b0d4a72439" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.341962] env[63538]: DEBUG nova.network.neutron [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1302.873729] env[63538]: DEBUG nova.network.neutron [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Instance cache missing network info. 
{{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1302.992235] env[63538]: DEBUG nova.network.neutron [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Updating instance_info_cache with network_info: [{"id": "eb5819a0-9549-4a1a-af36-c661faf9b44f", "address": "fa:16:3e:0b:65:13", "network": {"id": "59211045-9e6b-4999-8ba5-54e53c962714", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1134659557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "329e471d86b9451b86b0b28b2824eae5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a06a63d6-2aeb-4084-8022-f804cac3fa74", "external-id": "nsx-vlan-transportzone-797", "segmentation_id": 797, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb5819a0-95", "ovs_interfaceid": "eb5819a0-9549-4a1a-af36-c661faf9b44f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.495434] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Releasing lock "refresh_cache-2f7bc37b-36c6-404a-82a9-c2b0d4a72439" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1303.495799] env[63538]: DEBUG nova.compute.manager [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Instance network_info: |[{"id": "eb5819a0-9549-4a1a-af36-c661faf9b44f", "address": "fa:16:3e:0b:65:13", "network": {"id": "59211045-9e6b-4999-8ba5-54e53c962714", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1134659557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "329e471d86b9451b86b0b28b2824eae5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a06a63d6-2aeb-4084-8022-f804cac3fa74", "external-id": "nsx-vlan-transportzone-797", "segmentation_id": 797, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb5819a0-95", "ovs_interfaceid": "eb5819a0-9549-4a1a-af36-c661faf9b44f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1303.496291] env[63538]: DEBUG 
nova.virt.vmwareapi.vmops [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:65:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a06a63d6-2aeb-4084-8022-f804cac3fa74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb5819a0-9549-4a1a-af36-c661faf9b44f', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1303.503837] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Creating folder: Project (329e471d86b9451b86b0b28b2824eae5). Parent ref: group-v992234. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1303.504140] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9450631-e7d6-4bd1-8fd8-afd34682b85e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.515773] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Created folder: Project (329e471d86b9451b86b0b28b2824eae5) in parent group-v992234. [ 1303.515966] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Creating folder: Instances. Parent ref: group-v992563. {{(pid=63538) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1303.516222] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81c02fcc-e2fb-4424-a7d8-36edfae6cb2a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.526448] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Created folder: Instances in parent group-v992563. [ 1303.526698] env[63538]: DEBUG oslo.service.loopingcall [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1303.526900] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1303.527134] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fea6b1c0-6ca8-40cb-a805-8001a6bd4277 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.546501] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1303.546501] env[63538]: value = "task-5102023" [ 1303.546501] env[63538]: _type = "Task" [ 1303.546501] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.554578] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5102023, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.779414] env[63538]: DEBUG nova.compute.manager [req-8ee4c220-8ab5-4921-aacb-4b9577c9f9a7 req-57766c96-6073-4ea2-96d5-2f6791c730ec service nova] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Received event network-changed-eb5819a0-9549-4a1a-af36-c661faf9b44f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1303.779552] env[63538]: DEBUG nova.compute.manager [req-8ee4c220-8ab5-4921-aacb-4b9577c9f9a7 req-57766c96-6073-4ea2-96d5-2f6791c730ec service nova] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Refreshing instance network info cache due to event network-changed-eb5819a0-9549-4a1a-af36-c661faf9b44f. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1303.779782] env[63538]: DEBUG oslo_concurrency.lockutils [req-8ee4c220-8ab5-4921-aacb-4b9577c9f9a7 req-57766c96-6073-4ea2-96d5-2f6791c730ec service nova] Acquiring lock "refresh_cache-2f7bc37b-36c6-404a-82a9-c2b0d4a72439" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1303.779933] env[63538]: DEBUG oslo_concurrency.lockutils [req-8ee4c220-8ab5-4921-aacb-4b9577c9f9a7 req-57766c96-6073-4ea2-96d5-2f6791c730ec service nova] Acquired lock "refresh_cache-2f7bc37b-36c6-404a-82a9-c2b0d4a72439" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.780133] env[63538]: DEBUG nova.network.neutron [req-8ee4c220-8ab5-4921-aacb-4b9577c9f9a7 req-57766c96-6073-4ea2-96d5-2f6791c730ec service nova] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Refreshing network info cache for port eb5819a0-9549-4a1a-af36-c661faf9b44f {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1304.057891] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5102023, 'name': CreateVM_Task, 'duration_secs': 0.326146} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.058323] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1304.058815] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.059042] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.059412] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1304.059698] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d93b6a05-f768-4c86-9846-e26bddfae444 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.064780] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1304.064780] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52272697-61b3-0f0d-cca3-6bdc164e8fd8" [ 1304.064780] env[63538]: _type = "Task" [ 1304.064780] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.073073] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52272697-61b3-0f0d-cca3-6bdc164e8fd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.502025] env[63538]: DEBUG nova.network.neutron [req-8ee4c220-8ab5-4921-aacb-4b9577c9f9a7 req-57766c96-6073-4ea2-96d5-2f6791c730ec service nova] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Updated VIF entry in instance network info cache for port eb5819a0-9549-4a1a-af36-c661faf9b44f. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1304.502454] env[63538]: DEBUG nova.network.neutron [req-8ee4c220-8ab5-4921-aacb-4b9577c9f9a7 req-57766c96-6073-4ea2-96d5-2f6791c730ec service nova] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Updating instance_info_cache with network_info: [{"id": "eb5819a0-9549-4a1a-af36-c661faf9b44f", "address": "fa:16:3e:0b:65:13", "network": {"id": "59211045-9e6b-4999-8ba5-54e53c962714", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1134659557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "329e471d86b9451b86b0b28b2824eae5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a06a63d6-2aeb-4084-8022-f804cac3fa74", "external-id": "nsx-vlan-transportzone-797", "segmentation_id": 797, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb5819a0-95", "ovs_interfaceid": "eb5819a0-9549-4a1a-af36-c661faf9b44f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.575500] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52272697-61b3-0f0d-cca3-6bdc164e8fd8, 'name': SearchDatastore_Task, 'duration_secs': 0.01046} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.575837] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1304.576102] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1304.576349] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.576503] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.576690] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1304.577328] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8cd24134-253e-4194-bfd6-6f1866b7c5b1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.586330] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1304.586518] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1304.587259] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcf4681c-cd1f-4287-a100-bd21270235f4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.594423] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1304.594423] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521d149e-9710-45ad-1d24-1f9fe29d7356" [ 1304.594423] env[63538]: _type = "Task" [ 1304.594423] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.602632] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521d149e-9710-45ad-1d24-1f9fe29d7356, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.005630] env[63538]: DEBUG oslo_concurrency.lockutils [req-8ee4c220-8ab5-4921-aacb-4b9577c9f9a7 req-57766c96-6073-4ea2-96d5-2f6791c730ec service nova] Releasing lock "refresh_cache-2f7bc37b-36c6-404a-82a9-c2b0d4a72439" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1305.108158] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]521d149e-9710-45ad-1d24-1f9fe29d7356, 'name': SearchDatastore_Task, 'duration_secs': 0.010219} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.108992] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8816caa-4806-4c3c-b8a7-6b42d0637d6e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.115590] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1305.115590] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c2a296-840a-d875-434e-05d349126a04" [ 1305.115590] env[63538]: _type = "Task" [ 1305.115590] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.124709] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c2a296-840a-d875-434e-05d349126a04, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.626076] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c2a296-840a-d875-434e-05d349126a04, 'name': SearchDatastore_Task, 'duration_secs': 0.018697} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.626198] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1305.626579] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 2f7bc37b-36c6-404a-82a9-c2b0d4a72439/2f7bc37b-36c6-404a-82a9-c2b0d4a72439.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1305.626903] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-119af59c-64b4-435c-b96b-22970997001e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.634213] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1305.634213] env[63538]: value = "task-5102024" [ 1305.634213] env[63538]: _type = "Task" [ 1305.634213] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.642787] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102024, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.145256] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102024, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.645537] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102024, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.589324} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.646123] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 2f7bc37b-36c6-404a-82a9-c2b0d4a72439/2f7bc37b-36c6-404a-82a9-c2b0d4a72439.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1306.646123] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1306.646384] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47d25164-4af2-41ba-810e-6d49944cdd5a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.654831] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1306.654831] env[63538]: value = "task-5102025" [ 1306.654831] env[63538]: _type = "Task" [ 1306.654831] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.664673] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102025, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.164101] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102025, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069869} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.164541] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1307.165157] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4968ee-d52b-4c46-98eb-311a45fa6368 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.187031] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 2f7bc37b-36c6-404a-82a9-c2b0d4a72439/2f7bc37b-36c6-404a-82a9-c2b0d4a72439.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1307.187284] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d563172-2123-4a6e-9cf0-7834f9fe5b95 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.207618] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1307.207618] env[63538]: value = "task-5102026" [ 1307.207618] env[63538]: _type = "Task" [ 1307.207618] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.215510] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102026, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.717435] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102026, 'name': ReconfigVM_Task, 'duration_secs': 0.322851} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.717708] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 2f7bc37b-36c6-404a-82a9-c2b0d4a72439/2f7bc37b-36c6-404a-82a9-c2b0d4a72439.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1307.718374] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e625ad4a-2dbf-4f1a-86d8-1ac76c673d6a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.725379] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1307.725379] env[63538]: value = "task-5102027" [ 1307.725379] env[63538]: _type = "Task" [ 1307.725379] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.733071] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102027, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.235728] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102027, 'name': Rename_Task, 'duration_secs': 0.13078} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.236217] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1308.236347] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bcdb7907-898b-4c2d-a8cf-1ef4e3262906 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.243556] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1308.243556] env[63538]: value = "task-5102028" [ 1308.243556] env[63538]: _type = "Task" [ 1308.243556] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.253205] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102028, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.754137] env[63538]: DEBUG oslo_vmware.api [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102028, 'name': PowerOnVM_Task, 'duration_secs': 0.430389} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.754408] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1308.754591] env[63538]: INFO nova.compute.manager [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Took 7.20 seconds to spawn the instance on the hypervisor. [ 1308.754776] env[63538]: DEBUG nova.compute.manager [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1308.755662] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cff3f39-3b7f-41f4-946c-bef3eab7e5ee {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.276052] env[63538]: INFO nova.compute.manager [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Took 11.90 seconds to build instance. 
[ 1309.583766] env[63538]: INFO nova.compute.manager [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Rescuing [ 1309.584051] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "refresh_cache-2f7bc37b-36c6-404a-82a9-c2b0d4a72439" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1309.584213] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquired lock "refresh_cache-2f7bc37b-36c6-404a-82a9-c2b0d4a72439" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.584415] env[63538]: DEBUG nova.network.neutron [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1309.777159] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c361450-bf7f-4422-88ac-c1641f7a69fc tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "2f7bc37b-36c6-404a-82a9-c2b0d4a72439" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.412s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1310.297040] env[63538]: DEBUG nova.network.neutron [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Updating instance_info_cache with network_info: [{"id": "eb5819a0-9549-4a1a-af36-c661faf9b44f", "address": "fa:16:3e:0b:65:13", "network": {"id": "59211045-9e6b-4999-8ba5-54e53c962714", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1134659557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "329e471d86b9451b86b0b28b2824eae5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a06a63d6-2aeb-4084-8022-f804cac3fa74", "external-id": "nsx-vlan-transportzone-797", "segmentation_id": 797, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb5819a0-95", "ovs_interfaceid": "eb5819a0-9549-4a1a-af36-c661faf9b44f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.800443] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 
tempest-ServerRescueTestJSON-355456121-project-member] Releasing lock "refresh_cache-2f7bc37b-36c6-404a-82a9-c2b0d4a72439" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1311.334054] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1311.334054] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2d4fc8e-7392-4d98-a1c0-62f3158b851e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.343277] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1311.343277] env[63538]: value = "task-5102029" [ 1311.343277] env[63538]: _type = "Task" [ 1311.343277] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.352425] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102029, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.853104] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102029, 'name': PowerOffVM_Task, 'duration_secs': 0.405066} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.853387] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1311.854203] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76112a6a-e6f8-4699-bbaa-d8becaf96d03 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.874204] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9b8156-201b-4d1f-b85a-c6cc43c2fffa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.906209] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1311.906528] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55a00f37-6480-4c60-9a71-7fb1a8cc0c98 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.913270] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1311.913270] env[63538]: value = "task-5102030" [ 1311.913270] env[63538]: _type = "Task" [ 1311.913270] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.923534] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1311.923754] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1311.924011] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1311.924181] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.924367] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1311.924608] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb3168f1-6ae9-4bac-bcde-bf6f5b536925 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.933717] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1311.933899] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1311.934623] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15e2b7ae-9b13-4b7b-ba4c-5a955ce26d76 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.940048] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1311.940048] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520f13d9-6c0e-7928-9b3f-cb4684461810" [ 1311.940048] env[63538]: _type = "Task" [ 1311.940048] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.947480] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520f13d9-6c0e-7928-9b3f-cb4684461810, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.450471] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]520f13d9-6c0e-7928-9b3f-cb4684461810, 'name': SearchDatastore_Task, 'duration_secs': 0.015004} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.451323] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-103c413b-9812-49b4-8762-01b04d6c0328 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.457100] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1312.457100] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5206356e-753f-58fa-913e-86b214e1d525" [ 1312.457100] env[63538]: _type = "Task" [ 1312.457100] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.465621] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5206356e-753f-58fa-913e-86b214e1d525, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.967589] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5206356e-753f-58fa-913e-86b214e1d525, 'name': SearchDatastore_Task, 'duration_secs': 0.010692} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.967868] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.968137] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 2f7bc37b-36c6-404a-82a9-c2b0d4a72439/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk. {{(pid=63538) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1312.968415] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57879a2a-effa-4aaa-b312-ceb73cacf5bc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.974466] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1312.974466] env[63538]: value = "task-5102031" [ 1312.974466] env[63538]: _type = "Task" [ 1312.974466] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.982266] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102031, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.486069] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102031, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505329} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.486069] env[63538]: INFO nova.virt.vmwareapi.ds_util [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 2f7bc37b-36c6-404a-82a9-c2b0d4a72439/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk. 
[ 1313.486529] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709e6d5c-da49-4ff5-8e0c-2fd1a50caffa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.513308] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 2f7bc37b-36c6-404a-82a9-c2b0d4a72439/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1313.513547] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6154781e-cc52-4cf1-97da-5c603ec3a519 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.533994] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1313.533994] env[63538]: value = "task-5102032" [ 1313.533994] env[63538]: _type = "Task" [ 1313.533994] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.542475] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102032, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.044768] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102032, 'name': ReconfigVM_Task, 'duration_secs': 0.360928} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.045073] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 2f7bc37b-36c6-404a-82a9-c2b0d4a72439/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1314.045971] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4242180f-70d2-4185-9270-f93bd403138f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.072849] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b7f75bf-c209-4550-bba1-ef92b41509b5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.088732] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1314.088732] env[63538]: value = "task-5102033" [ 1314.088732] env[63538]: _type = "Task" [ 1314.088732] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.096640] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102033, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.598263] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102033, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.099635] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102033, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.600035] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102033, 'name': ReconfigVM_Task, 'duration_secs': 1.169312} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.600035] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1315.600035] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea62dc1b-52d6-42b9-a1f0-d4e5d57cdad9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.607568] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1315.607568] env[63538]: value = "task-5102034" [ 1315.607568] env[63538]: _type = "Task" [ 1315.607568] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.616875] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102034, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.118417] env[63538]: DEBUG oslo_vmware.api [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102034, 'name': PowerOnVM_Task, 'duration_secs': 0.400241} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.118779] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1316.121898] env[63538]: DEBUG nova.compute.manager [None req-6c4e8eb1-c1cb-499d-a5c3-b5d6f5941e71 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1316.122717] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb6f845-855f-4c31-ba09-108edd1a8380 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.716329] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "d6215939-5e06-425d-b947-224eebb8386b" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.716754] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "d6215939-5e06-425d-b947-224eebb8386b" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1317.716808] env[63538]: INFO nova.compute.manager [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Shelving [ 1317.892079] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "40d2c269-449b-4b1e-9422-abcfb6543c11" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.892315] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "40d2c269-449b-4b1e-9422-abcfb6543c11" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.224647] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1318.224940] 
env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5e2cb3b-f622-4eb9-aebd-16436a25684b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.232853] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1318.232853] env[63538]: value = "task-5102035" [ 1318.232853] env[63538]: _type = "Task" [ 1318.232853] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.242851] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102035, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.395884] env[63538]: DEBUG nova.compute.manager [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Starting instance... {{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1318.742591] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102035, 'name': PowerOffVM_Task, 'duration_secs': 0.31968} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.743017] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1318.743673] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f467d6-9649-483d-afcd-814d56ae883a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.761668] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872f3904-88b0-44b4-971c-d6932fb52d60 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.917439] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.917720] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.919343] env[63538]: INFO nova.compute.claims [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1319.272256] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Creating Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1319.272586] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fc355ce3-4586-4658-ad7b-e6349d0bc9d5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.281198] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1319.281198] env[63538]: value = "task-5102036" [ 1319.281198] env[63538]: _type = "Task" [ 1319.281198] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.290031] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102036, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.791737] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102036, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.989242] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204b6cbf-3167-40b8-a691-cd1a4c5f442c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.996978] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1e721c-6660-4477-8dd6-09c5b6bceb8b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.027704] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5060ce52-921e-4905-8d18-144abac2624d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.035892] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc16e84a-9a76-477f-b26e-ac1d6d06f602 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.049660] env[63538]: DEBUG nova.compute.provider_tree [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1320.292407] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102036, 'name': CreateSnapshot_Task, 'duration_secs': 0.683292} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.292683] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Created Snapshot of the VM instance {{(pid=63538) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1320.293460] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e30f11-4e37-4d8a-a610-758c1ae816e9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.552951] env[63538]: DEBUG nova.scheduler.client.report [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1320.654354] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1320.654626] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1320.654712] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Starting heal instance info cache {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 1320.654832] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Rebuilding the list of instances to heal {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10021}} [ 1320.810826] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Creating linked-clone VM from snapshot {{(pid=63538) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1320.811266] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-20f761b7-d624-4d08-92c3-e82d9bc1d545 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.820015] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1320.820015] env[63538]: value = "task-5102037" [ 1320.820015] env[63538]: _type = 
"Task" [ 1320.820015] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.828504] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102037, 'name': CloneVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.058537] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.141s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.059141] env[63538]: DEBUG nova.compute.manager [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1321.159201] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Skipping network cache update for instance because it is Building. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10030}} [ 1321.159418] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1321.159516] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquired lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.159657] env[63538]: DEBUG nova.network.neutron [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: d6215939-5e06-425d-b947-224eebb8386b] Forcefully refreshing network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1321.159805] env[63538]: DEBUG nova.objects.instance [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lazy-loading 'info_cache' on Instance uuid d6215939-5e06-425d-b947-224eebb8386b {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1321.330783] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102037, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.564138] env[63538]: DEBUG nova.compute.utils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1321.565600] env[63538]: DEBUG nova.compute.manager [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1321.565775] env[63538]: DEBUG nova.network.neutron [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1321.613534] env[63538]: DEBUG nova.policy [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '534d33f66f17494c835f4e9d21b48909', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '329e471d86b9451b86b0b28b2824eae5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1321.831760] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102037, 'name': CloneVM_Task} progress is 95%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.879710] env[63538]: DEBUG nova.network.neutron [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Successfully created port: f27718d0-5d86-447d-ad7b-597b5af98ab8 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1322.068818] env[63538]: DEBUG nova.compute.manager [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Start building block device mappings for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1322.332234] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102037, 'name': CloneVM_Task, 'duration_secs': 1.188714} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.332518] env[63538]: INFO nova.virt.vmwareapi.vmops [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Created linked-clone VM from snapshot [ 1322.333306] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8d02ad-da37-4454-bce7-926c0e668e96 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.341722] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Uploading image e467b0b6-bd80-444b-b4c5-e5bc6eaff630 {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1322.368613] env[63538]: DEBUG oslo_vmware.rw_handles [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1322.368613] env[63538]: value = "vm-992567" [ 1322.368613] env[63538]: _type = "VirtualMachine" [ 1322.368613] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1322.368976] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f70ad169-9372-411b-b180-32b8a6e9eea3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.376845] env[63538]: DEBUG oslo_vmware.rw_handles [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lease: (returnval){ [ 1322.376845] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c3e86b-8dbc-1a94-3758-2132a1183584" [ 1322.376845] env[63538]: _type = "HttpNfcLease" [ 1322.376845] env[63538]: } obtained for exporting VM: (result){ [ 1322.376845] env[63538]: value = "vm-992567" [ 1322.376845] env[63538]: _type = "VirtualMachine" [ 1322.376845] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1322.377198] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the lease: (returnval){ [ 1322.377198] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c3e86b-8dbc-1a94-3758-2132a1183584" [ 1322.377198] env[63538]: _type = "HttpNfcLease" [ 1322.377198] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1322.384664] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1322.384664] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c3e86b-8dbc-1a94-3758-2132a1183584" [ 1322.384664] env[63538]: _type = "HttpNfcLease" [ 1322.384664] env[63538]: } is initializing. 
{{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1322.885542] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1322.885542] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c3e86b-8dbc-1a94-3758-2132a1183584" [ 1322.885542] env[63538]: _type = "HttpNfcLease" [ 1322.885542] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1322.886044] env[63538]: DEBUG oslo_vmware.rw_handles [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1322.886044] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c3e86b-8dbc-1a94-3758-2132a1183584" [ 1322.886044] env[63538]: _type = "HttpNfcLease" [ 1322.886044] env[63538]: }. {{(pid=63538) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1322.886694] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4457a686-009c-42a5-8359-620940214dbd {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.895623] env[63538]: DEBUG oslo_vmware.rw_handles [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52875882-ff46-521e-54df-ae24e0db714f/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1322.895784] env[63538]: DEBUG oslo_vmware.rw_handles [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52875882-ff46-521e-54df-ae24e0db714f/disk-0.vmdk for reading. 
{{(pid=63538) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1322.952243] env[63538]: DEBUG nova.network.neutron [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updating instance_info_cache with network_info: [{"id": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "address": "fa:16:3e:59:b6:e8", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f276ada-dc", "ovs_interfaceid": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.986741] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1a5f5ac7-cf10-4c92-8fd2-3f342e61f12c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.078354] env[63538]: DEBUG nova.compute.manager [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Start spawning the instance on the hypervisor. 
{{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1323.100574] env[63538]: DEBUG nova.virt.hardware [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1323.100845] env[63538]: DEBUG nova.virt.hardware [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1323.101019] env[63538]: DEBUG nova.virt.hardware [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1323.101213] env[63538]: DEBUG nova.virt.hardware [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1323.101370] env[63538]: DEBUG nova.virt.hardware [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1323.101523] env[63538]: DEBUG nova.virt.hardware [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1323.101734] env[63538]: DEBUG nova.virt.hardware [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1323.101898] env[63538]: DEBUG nova.virt.hardware [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1323.102086] env[63538]: DEBUG nova.virt.hardware [None 
req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1323.102262] env[63538]: DEBUG nova.virt.hardware [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1323.102437] env[63538]: DEBUG nova.virt.hardware [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1323.103333] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f4e362-c4b4-4cbb-808b-854fc0365cef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.112581] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72020767-de66-45d8-b0d9-835a49781a65 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.304122] env[63538]: DEBUG nova.compute.manager [req-7754af6d-f970-4875-8b81-02f25ee2a620 req-4ae45bfa-b664-493e-b3d6-c58c54007b4f service nova] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Received event network-vif-plugged-f27718d0-5d86-447d-ad7b-597b5af98ab8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1323.304446] env[63538]: DEBUG oslo_concurrency.lockutils [req-7754af6d-f970-4875-8b81-02f25ee2a620 req-4ae45bfa-b664-493e-b3d6-c58c54007b4f service nova] Acquiring lock "40d2c269-449b-4b1e-9422-abcfb6543c11-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.304713] env[63538]: DEBUG oslo_concurrency.lockutils [req-7754af6d-f970-4875-8b81-02f25ee2a620 req-4ae45bfa-b664-493e-b3d6-c58c54007b4f service nova] Lock "40d2c269-449b-4b1e-9422-abcfb6543c11-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.304917] env[63538]: DEBUG oslo_concurrency.lockutils [req-7754af6d-f970-4875-8b81-02f25ee2a620 req-4ae45bfa-b664-493e-b3d6-c58c54007b4f service nova] Lock "40d2c269-449b-4b1e-9422-abcfb6543c11-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1323.305141] env[63538]: DEBUG nova.compute.manager [req-7754af6d-f970-4875-8b81-02f25ee2a620 req-4ae45bfa-b664-493e-b3d6-c58c54007b4f service nova] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] No waiting events found dispatching network-vif-plugged-f27718d0-5d86-447d-ad7b-597b5af98ab8 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1323.305373] env[63538]: WARNING nova.compute.manager [req-7754af6d-f970-4875-8b81-02f25ee2a620 
req-4ae45bfa-b664-493e-b3d6-c58c54007b4f service nova] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Received unexpected event network-vif-plugged-f27718d0-5d86-447d-ad7b-597b5af98ab8 for instance with vm_state building and task_state spawning. [ 1323.404612] env[63538]: DEBUG nova.network.neutron [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Successfully updated port: f27718d0-5d86-447d-ad7b-597b5af98ab8 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1323.454615] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Releasing lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.454837] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updated the network info_cache for instance {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10088}} [ 1323.455088] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.455282] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.455431] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.455581] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.455726] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.455874] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.456032] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63538) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10636}} [ 1323.456295] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.907580] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "refresh_cache-40d2c269-449b-4b1e-9422-abcfb6543c11" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.908022] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquired lock "refresh_cache-40d2c269-449b-4b1e-9422-abcfb6543c11" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.908022] env[63538]: DEBUG nova.network.neutron [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1323.959783] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.960015] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.960201] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1323.960367] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63538) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1323.961488] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19d7041-6671-480f-b714-beec2049f395 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.971158] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8a6fc8-681d-4198-8b7e-8db2113eed11 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.987992] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-db3f2f31-8a55-4f29-8ef3-a5a0446812e7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.996187] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e17f035-6b5f-4e40-a6fb-10f9e437c20e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.030480] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180111MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=63538) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1324.030793] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.031148] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.439443] env[63538]: DEBUG nova.network.neutron [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1324.561520] env[63538]: DEBUG nova.network.neutron [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Updating instance_info_cache with network_info: [{"id": "f27718d0-5d86-447d-ad7b-597b5af98ab8", "address": "fa:16:3e:3f:dd:e0", "network": {"id": "59211045-9e6b-4999-8ba5-54e53c962714", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1134659557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "329e471d86b9451b86b0b28b2824eae5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a06a63d6-2aeb-4084-8022-f804cac3fa74", "external-id": "nsx-vlan-transportzone-797", "segmentation_id": 797, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf27718d0-5d", "ovs_interfaceid": "f27718d0-5d86-447d-ad7b-597b5af98ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.059848] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 
d6215939-5e06-425d-b947-224eebb8386b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1325.060227] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 579c71bd-24f0-4257-856c-b24ddb2b9dba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1325.060485] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 2f7bc37b-36c6-404a-82a9-c2b0d4a72439 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1325.060752] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 40d2c269-449b-4b1e-9422-abcfb6543c11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1325.061156] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1325.061404] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=100GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '4', 'num_vm_active': '2', 'num_task_shelving_image_uploading': '1', 'num_os_type_None': '4', 'num_proj_1a06b7cc1ab24ba584bbe970e4fc5e81': '1', 'io_workload': '1', 'num_task_None': '2', 'num_proj_9b1eba931f144b94b6e186dac1310dfa': '1', 'num_vm_rescued': '1', 'num_proj_329e471d86b9451b86b0b28b2824eae5': '2', 'num_vm_building': '1', 'num_task_spawning': '1'} {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1325.064161] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Releasing lock "refresh_cache-40d2c269-449b-4b1e-9422-abcfb6543c11" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1325.064435] env[63538]: DEBUG nova.compute.manager [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Instance network_info: |[{"id": "f27718d0-5d86-447d-ad7b-597b5af98ab8", "address": "fa:16:3e:3f:dd:e0", "network": {"id": "59211045-9e6b-4999-8ba5-54e53c962714", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1134659557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "329e471d86b9451b86b0b28b2824eae5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a06a63d6-2aeb-4084-8022-f804cac3fa74", "external-id": "nsx-vlan-transportzone-797", "segmentation_id": 797, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf27718d0-5d", "ovs_interfaceid": "f27718d0-5d86-447d-ad7b-597b5af98ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1325.065176] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:dd:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a06a63d6-2aeb-4084-8022-f804cac3fa74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f27718d0-5d86-447d-ad7b-597b5af98ab8', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1325.076625] env[63538]: DEBUG oslo.service.loopingcall [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1325.080532] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1325.081230] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ef87919-ffec-4d4a-a3ff-e3b673cc6948 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.115938] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1325.115938] env[63538]: value = "task-5102039" [ 1325.115938] env[63538]: _type = "Task" [ 1325.115938] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.129558] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5102039, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.148676] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c67bb10-6792-46ec-8a73-20618c66c890 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.158842] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17955ec2-2041-4ddd-9569-6a6c59a09469 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.204398] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7ddca5-d544-48f0-abf0-64c6c441f146 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.214626] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db2d01c-ccef-40ab-987e-e9ca37b3c16a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.232553] env[63538]: DEBUG nova.compute.provider_tree [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1325.335489] env[63538]: DEBUG nova.compute.manager [req-f308f218-9e17-411b-bfe9-2073c53bb080 req-45417892-25d8-42e7-87d5-f83a153280af service nova] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Received event network-changed-f27718d0-5d86-447d-ad7b-597b5af98ab8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1325.335651] env[63538]: DEBUG nova.compute.manager [req-f308f218-9e17-411b-bfe9-2073c53bb080 req-45417892-25d8-42e7-87d5-f83a153280af service nova] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Refreshing instance network info cache due to event network-changed-f27718d0-5d86-447d-ad7b-597b5af98ab8. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1325.335986] env[63538]: DEBUG oslo_concurrency.lockutils [req-f308f218-9e17-411b-bfe9-2073c53bb080 req-45417892-25d8-42e7-87d5-f83a153280af service nova] Acquiring lock "refresh_cache-40d2c269-449b-4b1e-9422-abcfb6543c11" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1325.336199] env[63538]: DEBUG oslo_concurrency.lockutils [req-f308f218-9e17-411b-bfe9-2073c53bb080 req-45417892-25d8-42e7-87d5-f83a153280af service nova] Acquired lock "refresh_cache-40d2c269-449b-4b1e-9422-abcfb6543c11" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.336409] env[63538]: DEBUG nova.network.neutron [req-f308f218-9e17-411b-bfe9-2073c53bb080 req-45417892-25d8-42e7-87d5-f83a153280af service nova] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Refreshing network info cache for port f27718d0-5d86-447d-ad7b-597b5af98ab8 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1325.626057] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5102039, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.736546] env[63538]: DEBUG nova.scheduler.client.report [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1326.054726] env[63538]: DEBUG nova.network.neutron [req-f308f218-9e17-411b-bfe9-2073c53bb080 req-45417892-25d8-42e7-87d5-f83a153280af service nova] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Updated VIF entry in instance network info cache for port f27718d0-5d86-447d-ad7b-597b5af98ab8. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1326.055133] env[63538]: DEBUG nova.network.neutron [req-f308f218-9e17-411b-bfe9-2073c53bb080 req-45417892-25d8-42e7-87d5-f83a153280af service nova] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Updating instance_info_cache with network_info: [{"id": "f27718d0-5d86-447d-ad7b-597b5af98ab8", "address": "fa:16:3e:3f:dd:e0", "network": {"id": "59211045-9e6b-4999-8ba5-54e53c962714", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1134659557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "329e471d86b9451b86b0b28b2824eae5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a06a63d6-2aeb-4084-8022-f804cac3fa74", "external-id": "nsx-vlan-transportzone-797", "segmentation_id": 797, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf27718d0-5d", "ovs_interfaceid": "f27718d0-5d86-447d-ad7b-597b5af98ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.127626] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5102039, 'name': CreateVM_Task, 'duration_secs': 0.687196} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.128073] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1326.128615] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.128862] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.129249] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1326.129564] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecc92d57-7169-4b7e-b34f-83e492b0e1b8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.134982] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1326.134982] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5281a38e-a53a-bc02-497a-1ee2372bb08d" [ 1326.134982] env[63538]: _type = "Task" [ 1326.134982] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.143677] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5281a38e-a53a-bc02-497a-1ee2372bb08d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.242065] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63538) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1326.242255] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.211s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.557928] env[63538]: DEBUG oslo_concurrency.lockutils [req-f308f218-9e17-411b-bfe9-2073c53bb080 req-45417892-25d8-42e7-87d5-f83a153280af service nova] Releasing lock "refresh_cache-40d2c269-449b-4b1e-9422-abcfb6543c11" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1326.646109] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5281a38e-a53a-bc02-497a-1ee2372bb08d, 'name': SearchDatastore_Task, 'duration_secs': 0.013893} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.646462] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1326.646707] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1326.646952] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.647133] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.647330] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1326.647600] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06fb3379-ac08-4153-88be-5baa5ec9fa59 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.660753] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1326.660941] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1326.661700] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf10e23e-d76f-4982-98cc-0984f90d7e4d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.668379] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1326.668379] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b34774-600e-c798-4eec-fdf8716964f7" [ 1326.668379] env[63538]: _type = "Task" [ 1326.668379] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.680158] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b34774-600e-c798-4eec-fdf8716964f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.185383] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52b34774-600e-c798-4eec-fdf8716964f7, 'name': SearchDatastore_Task, 'duration_secs': 0.025728} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.186688] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb31d999-fb30-43fe-acb4-2fb3d84db79d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.195084] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1327.195084] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c8b085-f829-d675-c00c-979963fce77c" [ 1327.195084] env[63538]: _type = "Task" [ 1327.195084] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.205530] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c8b085-f829-d675-c00c-979963fce77c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.706562] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52c8b085-f829-d675-c00c-979963fce77c, 'name': SearchDatastore_Task, 'duration_secs': 0.014947} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.706830] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.707111] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 40d2c269-449b-4b1e-9422-abcfb6543c11/40d2c269-449b-4b1e-9422-abcfb6543c11.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1327.707509] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3de1c24c-535e-4a89-8bd2-ca2be39ba9cf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.716024] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1327.716024] env[63538]: value = "task-5102040" [ 1327.716024] env[63538]: _type = "Task" [ 1327.716024] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.726080] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102040, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.226275] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102040, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.728060] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102040, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.792326} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.728423] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 40d2c269-449b-4b1e-9422-abcfb6543c11/40d2c269-449b-4b1e-9422-abcfb6543c11.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1328.728654] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1328.728997] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11e927b2-c9e8-47d8-aa1e-d05819c420dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.737433] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1328.737433] env[63538]: value = "task-5102041" [ 1328.737433] env[63538]: _type = "Task" [ 1328.737433] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.746967] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102041, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.248146] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102041, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.132641} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.248522] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1329.249311] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e117cbe-3a5f-4390-b63c-403c65285968 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.273440] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] 40d2c269-449b-4b1e-9422-abcfb6543c11/40d2c269-449b-4b1e-9422-abcfb6543c11.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1329.273773] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5dbf61d9-e519-4d8b-af7c-c3d3537d9140 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.293726] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1329.293726] env[63538]: value = "task-5102042" [ 1329.293726] env[63538]: _type = "Task" [ 1329.293726] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.304369] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102042, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.805751] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102042, 'name': ReconfigVM_Task, 'duration_secs': 0.45355} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.806121] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Reconfigured VM instance instance-0000007d to attach disk [datastore2] 40d2c269-449b-4b1e-9422-abcfb6543c11/40d2c269-449b-4b1e-9422-abcfb6543c11.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1329.806863] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e23e043e-14bc-42bc-a5ce-8f3552e35f42 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.813851] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1329.813851] env[63538]: value = "task-5102043" [ 1329.813851] env[63538]: _type = "Task" [ 1329.813851] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.824473] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102043, 'name': Rename_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.326890] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102043, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.825453] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102043, 'name': Rename_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.124539] env[63538]: DEBUG oslo_vmware.rw_handles [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52875882-ff46-521e-54df-ae24e0db714f/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1331.125486] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652c9a00-907e-4326-b4f0-4e9f5487fdf5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.131948] env[63538]: DEBUG oslo_vmware.rw_handles [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52875882-ff46-521e-54df-ae24e0db714f/disk-0.vmdk is in state: ready. 
{{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1331.132144] env[63538]: ERROR oslo_vmware.rw_handles [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52875882-ff46-521e-54df-ae24e0db714f/disk-0.vmdk due to incomplete transfer. [ 1331.132399] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c6b7a870-1c1c-451a-a306-64c8c8744eba {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.140705] env[63538]: DEBUG oslo_vmware.rw_handles [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52875882-ff46-521e-54df-ae24e0db714f/disk-0.vmdk. {{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1331.140925] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Uploaded image e467b0b6-bd80-444b-b4c5-e5bc6eaff630 to the Glance image server {{(pid=63538) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1331.143329] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Destroying the VM {{(pid=63538) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1331.143596] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b56228cb-24ab-480c-8215-8d4665ea22a4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.150227] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1331.150227] env[63538]: value = "task-5102044" [ 1331.150227] env[63538]: _type = "Task" [ 1331.150227] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.159509] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102044, 'name': Destroy_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.325779] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102043, 'name': Rename_Task, 'duration_secs': 1.201813} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.326077] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1331.326353] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc27d47c-3eec-48b0-be09-96091d6b7725 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.333485] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1331.333485] env[63538]: value = "task-5102045" [ 1331.333485] env[63538]: _type = "Task" [ 1331.333485] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.342214] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102045, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.660610] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102044, 'name': Destroy_Task, 'duration_secs': 0.475177} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.660901] env[63538]: INFO nova.virt.vmwareapi.vm_util [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Destroyed the VM [ 1331.661162] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Deleting Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1331.661441] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a5d9e757-7b5f-4287-9d21-4b61ecc85e87 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.668446] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1331.668446] env[63538]: value = "task-5102046" [ 1331.668446] env[63538]: _type = "Task" [ 1331.668446] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.676802] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102046, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.844601] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102045, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.178657] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102046, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.345170] env[63538]: DEBUG oslo_vmware.api [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102045, 'name': PowerOnVM_Task, 'duration_secs': 0.606377} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.345468] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1332.345674] env[63538]: INFO nova.compute.manager [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Took 9.27 seconds to spawn the instance on the hypervisor. [ 1332.345861] env[63538]: DEBUG nova.compute.manager [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1332.346730] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2926cfc-89f5-4cf3-a1ab-0393afbc354b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.679356] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102046, 'name': RemoveSnapshot_Task, 'duration_secs': 0.871569} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.679549] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Deleted Snapshot of the VM instance {{(pid=63538) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1332.679830] env[63538]: DEBUG nova.compute.manager [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1332.680656] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f78798-6ae8-4179-962f-1fead803c85f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.863025] env[63538]: INFO nova.compute.manager [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Took 13.96 seconds to build instance. [ 1333.193693] env[63538]: INFO nova.compute.manager [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Shelve offloading [ 1333.195344] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1333.195833] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4f06c64-e491-484a-83b6-06e8ec54e47a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.203409] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1333.203409] env[63538]: value = "task-5102047" [ 1333.203409] env[63538]: _type = "Task" [ 1333.203409] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.212479] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1333.212676] env[63538]: DEBUG nova.compute.manager [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1333.213461] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2468253-52fe-475c-aefc-1d4cc3b2bfed {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.219567] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1333.219727] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.219942] env[63538]: DEBUG nova.network.neutron [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1333.365345] env[63538]: DEBUG oslo_concurrency.lockutils [None req-dbc5f389-b36e-490c-865c-c084affb6b3a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "40d2c269-449b-4b1e-9422-abcfb6543c11" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.473s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.428199] env[63538]: INFO nova.compute.manager [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Rescuing [ 1333.428511] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "refresh_cache-40d2c269-449b-4b1e-9422-abcfb6543c11" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1333.428688] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquired lock 
"refresh_cache-40d2c269-449b-4b1e-9422-abcfb6543c11" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.428990] env[63538]: DEBUG nova.network.neutron [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1333.906191] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.906434] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.942586] env[63538]: DEBUG nova.network.neutron [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updating instance_info_cache with network_info: [{"id": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "address": "fa:16:3e:59:b6:e8", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f276ada-dc", "ovs_interfaceid": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.111365] env[63538]: DEBUG nova.network.neutron [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Updating instance_info_cache with network_info: [{"id": "f27718d0-5d86-447d-ad7b-597b5af98ab8", "address": "fa:16:3e:3f:dd:e0", "network": {"id": "59211045-9e6b-4999-8ba5-54e53c962714", "bridge": "br-int", "label": 
"tempest-ServerRescueTestJSON-1134659557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "329e471d86b9451b86b0b28b2824eae5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a06a63d6-2aeb-4084-8022-f804cac3fa74", "external-id": "nsx-vlan-transportzone-797", "segmentation_id": 797, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf27718d0-5d", "ovs_interfaceid": "f27718d0-5d86-447d-ad7b-597b5af98ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.409729] env[63538]: DEBUG nova.compute.utils [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1334.445213] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1334.614090] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Releasing lock "refresh_cache-40d2c269-449b-4b1e-9422-abcfb6543c11" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1334.733754] env[63538]: DEBUG nova.compute.manager [req-11a756bc-3e15-409a-9073-ac672c96ae16 req-0e63c3a9-ee05-4d17-8806-d6dd195d1da2 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Received event network-vif-unplugged-5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1334.734022] env[63538]: DEBUG oslo_concurrency.lockutils [req-11a756bc-3e15-409a-9073-ac672c96ae16 req-0e63c3a9-ee05-4d17-8806-d6dd195d1da2 service nova] Acquiring lock "d6215939-5e06-425d-b947-224eebb8386b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.734276] env[63538]: DEBUG oslo_concurrency.lockutils [req-11a756bc-3e15-409a-9073-ac672c96ae16 req-0e63c3a9-ee05-4d17-8806-d6dd195d1da2 service nova] Lock "d6215939-5e06-425d-b947-224eebb8386b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.734433] env[63538]: DEBUG oslo_concurrency.lockutils [req-11a756bc-3e15-409a-9073-ac672c96ae16 req-0e63c3a9-ee05-4d17-8806-d6dd195d1da2 service nova] Lock "d6215939-5e06-425d-b947-224eebb8386b-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.734602] env[63538]: DEBUG nova.compute.manager [req-11a756bc-3e15-409a-9073-ac672c96ae16 req-0e63c3a9-ee05-4d17-8806-d6dd195d1da2 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] No waiting events found dispatching network-vif-unplugged-5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1334.734781] env[63538]: WARNING nova.compute.manager [req-11a756bc-3e15-409a-9073-ac672c96ae16 req-0e63c3a9-ee05-4d17-8806-d6dd195d1da2 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Received unexpected event network-vif-unplugged-5f276ada-dc8e-4558-a169-bfcaf25172e8 for instance with vm_state shelved and task_state shelving_offloading. [ 1334.829568] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1334.830575] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfee0428-366c-4191-9b9b-32e13d982f44 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.839211] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1334.839535] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ca236cc-e6a8-4ccb-91cc-b8baa88c5e4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.905432] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1334.905629] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1334.905798] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleting the datastore file [datastore2] d6215939-5e06-425d-b947-224eebb8386b {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1334.906170] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ab870b9-48bd-47fa-8a3a-b693ede43675 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.913689] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.914288] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1334.914288] env[63538]: value = "task-5102049" [ 1334.914288] env[63538]: _type = "Task" [ 1334.914288] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.925106] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102049, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.148351] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1335.148755] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb66b4f2-99c3-432b-b819-9ef162832c34 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.157298] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1335.157298] env[63538]: value = "task-5102050" [ 1335.157298] env[63538]: _type = "Task" [ 1335.157298] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.167294] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102050, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.445051] env[63538]: DEBUG oslo_vmware.api [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102049, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136778} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.445051] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1335.445051] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1335.445051] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1335.476253] env[63538]: INFO nova.scheduler.client.report [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleted allocations for instance d6215939-5e06-425d-b947-224eebb8386b [ 1335.667229] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102050, 'name': PowerOffVM_Task, 'duration_secs': 0.214124} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.667544] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1335.668414] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ebb990-5412-4aa6-a933-4a1f1fe90aef {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.686463] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddca78c2-8b16-4fac-ab87-ac6c08a066d9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.718122] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1335.718416] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f80a855e-1da3-4f3c-89e8-c8c9f25a7108 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.724750] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] 
Waiting for the task: (returnval){ [ 1335.724750] env[63538]: value = "task-5102051" [ 1335.724750] env[63538]: _type = "Task" [ 1335.724750] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.734685] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] VM already powered off {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1335.734898] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1335.735162] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.735318] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.735497] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1335.735741] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e50d2f9c-e1f6-43dd-84d5-e9e5cfd1d1c8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.743694] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1335.743872] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1335.744603] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8fb5dd4-f4a0-41fc-82f6-61e55705663d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.750332] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1335.750332] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523f8378-b7b5-2892-0a59-d2a224094a99" [ 1335.750332] env[63538]: _type = "Task" [ 1335.750332] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.757990] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523f8378-b7b5-2892-0a59-d2a224094a99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.981514] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.981737] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.981983] env[63538]: DEBUG nova.objects.instance [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'resources' on Instance uuid d6215939-5e06-425d-b947-224eebb8386b {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1335.996226] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.996826] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.997083] env[63538]: INFO nova.compute.manager [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b 
tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Attaching volume 795bf84c-2544-4215-8a47-c7bd3b14f464 to /dev/sdb [ 1336.028012] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14f1e81-9157-4deb-bb05-65e73781c974 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.035518] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b942e7b2-0576-48ab-9a17-464884703296 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.048434] env[63538]: DEBUG nova.virt.block_device [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Updating existing volume attachment record: 57849e6e-7ba6-446a-b4fa-820db956c0e7 {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1336.264966] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523f8378-b7b5-2892-0a59-d2a224094a99, 'name': SearchDatastore_Task, 'duration_secs': 0.00891} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.265823] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-208af04e-1fa3-42ef-af5b-ee26b8c8463b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.271622] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1336.271622] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525d9b17-6680-bce0-1b49-8cef421f4d24" [ 1336.271622] env[63538]: _type = "Task" [ 1336.271622] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.279617] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525d9b17-6680-bce0-1b49-8cef421f4d24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.484900] env[63538]: DEBUG nova.objects.instance [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'numa_topology' on Instance uuid d6215939-5e06-425d-b947-224eebb8386b {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1336.759982] env[63538]: DEBUG nova.compute.manager [req-dac7402c-b4d4-47c8-b001-f29de763727a req-cd6354cc-6b9f-47dc-b309-69538fe23369 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Received event network-changed-5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1336.760216] env[63538]: DEBUG nova.compute.manager [req-dac7402c-b4d4-47c8-b001-f29de763727a req-cd6354cc-6b9f-47dc-b309-69538fe23369 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Refreshing instance network info cache due to event network-changed-5f276ada-dc8e-4558-a169-bfcaf25172e8. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1336.760457] env[63538]: DEBUG oslo_concurrency.lockutils [req-dac7402c-b4d4-47c8-b001-f29de763727a req-cd6354cc-6b9f-47dc-b309-69538fe23369 service nova] Acquiring lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.760617] env[63538]: DEBUG oslo_concurrency.lockutils [req-dac7402c-b4d4-47c8-b001-f29de763727a req-cd6354cc-6b9f-47dc-b309-69538fe23369 service nova] Acquired lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.760792] env[63538]: DEBUG nova.network.neutron [req-dac7402c-b4d4-47c8-b001-f29de763727a req-cd6354cc-6b9f-47dc-b309-69538fe23369 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Refreshing network info cache for port 5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1336.783185] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]525d9b17-6680-bce0-1b49-8cef421f4d24, 'name': SearchDatastore_Task, 'duration_secs': 0.009758} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.783465] env[63538]: DEBUG oslo_concurrency.lockutils [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.783810] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 40d2c269-449b-4b1e-9422-abcfb6543c11/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk. {{(pid=63538) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1336.784134] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28f1c69c-bf51-4879-8acc-3288ac536d9e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.792487] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1336.792487] env[63538]: value = "task-5102053" [ 1336.792487] env[63538]: _type = "Task" [ 1336.792487] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.801064] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102053, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.987762] env[63538]: DEBUG nova.objects.base [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63538) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1337.047467] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7b4bd2-e81b-472d-8219-b6691b0dd830 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.055841] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39fc850-0ad1-47a9-82dc-e88a5b8af0ac {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.088330] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4270165-53ba-49d8-a290-22ef9a58d724 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.097091] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e3ca4f-7fa3-4db4-9c48-6d9821056923 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.114333] env[63538]: DEBUG nova.compute.provider_tree [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1337.303677] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102053, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48546} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.306224] env[63538]: INFO nova.virt.vmwareapi.ds_util [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore2] 40d2c269-449b-4b1e-9422-abcfb6543c11/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk. 
[ 1337.307165] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cff8014-12cc-429c-a8d5-416fb9534a1e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.332451] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] 40d2c269-449b-4b1e-9422-abcfb6543c11/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1337.332762] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b1692ef-4a34-4491-81eb-ea1e86370345 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.352963] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1337.352963] env[63538]: value = "task-5102054" [ 1337.352963] env[63538]: _type = "Task" [ 1337.352963] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.360845] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102054, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.510487] env[63538]: DEBUG nova.network.neutron [req-dac7402c-b4d4-47c8-b001-f29de763727a req-cd6354cc-6b9f-47dc-b309-69538fe23369 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updated VIF entry in instance network info cache for port 5f276ada-dc8e-4558-a169-bfcaf25172e8. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1337.511033] env[63538]: DEBUG nova.network.neutron [req-dac7402c-b4d4-47c8-b001-f29de763727a req-cd6354cc-6b9f-47dc-b309-69538fe23369 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updating instance_info_cache with network_info: [{"id": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "address": "fa:16:3e:59:b6:e8", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap5f276ada-dc", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.618126] env[63538]: DEBUG nova.scheduler.client.report [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1337.863333] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102054, 'name': ReconfigVM_Task, 'duration_secs': 0.294425} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.863642] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Reconfigured VM instance instance-0000007d to attach disk [datastore2] 40d2c269-449b-4b1e-9422-abcfb6543c11/faabbca4-e27b-433a-b93d-f059fd73bc92-rescue.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1337.864507] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461a990b-df62-42da-828c-dbd28a980f9b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.888730] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb248576-c37d-4063-81cf-db0c3887ac30 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.904410] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1337.904410] env[63538]: value = "task-5102055" [ 1337.904410] env[63538]: _type = "Task" [ 1337.904410] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.917704] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102055, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.991708] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "d6215939-5e06-425d-b947-224eebb8386b" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.014183] env[63538]: DEBUG oslo_concurrency.lockutils [req-dac7402c-b4d4-47c8-b001-f29de763727a req-cd6354cc-6b9f-47dc-b309-69538fe23369 service nova] Releasing lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1338.123882] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.142s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.417531] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102055, 'name': ReconfigVM_Task, 'duration_secs': 0.172281} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.417915] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1338.418251] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a008cd6c-d1a4-4581-a5a5-eba64b485a84 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.424636] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1338.424636] env[63538]: value = "task-5102057" [ 1338.424636] env[63538]: _type = "Task" [ 1338.424636] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.432459] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102057, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.631053] env[63538]: DEBUG oslo_concurrency.lockutils [None req-60631ae1-a0e5-4d14-bc4c-6d3bec2d9aca tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "d6215939-5e06-425d-b947-224eebb8386b" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.914s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.632058] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "d6215939-5e06-425d-b947-224eebb8386b" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.640s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.632058] env[63538]: INFO nova.compute.manager [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Unshelving [ 1338.935333] env[63538]: DEBUG oslo_vmware.api [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102057, 'name': PowerOnVM_Task, 'duration_secs': 0.391432} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.935619] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1338.938513] env[63538]: DEBUG nova.compute.manager [None req-d7fe6a73-fb00-4378-ae99-f639327fe698 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1338.939327] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673e7479-1303-417c-bb49-c8ae4d89b157 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.660110] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.660530] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.660859] env[63538]: DEBUG nova.objects.instance [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'pci_requests' on Instance uuid d6215939-5e06-425d-b947-224eebb8386b {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1340.165740] env[63538]: DEBUG nova.objects.instance [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'numa_topology' on Instance uuid d6215939-5e06-425d-b947-224eebb8386b {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1340.339221] env[63538]: INFO nova.compute.manager [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Unrescuing [ 1340.339548] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "refresh_cache-40d2c269-449b-4b1e-9422-abcfb6543c11" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1340.339708] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquired lock "refresh_cache-40d2c269-449b-4b1e-9422-abcfb6543c11" {{(pid=63538) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.339885] env[63538]: DEBUG nova.network.neutron [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1340.591915] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Volume attach. Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1340.592218] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992569', 'volume_id': '795bf84c-2544-4215-8a47-c7bd3b14f464', 'name': 'volume-795bf84c-2544-4215-8a47-c7bd3b14f464', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '579c71bd-24f0-4257-856c-b24ddb2b9dba', 'attached_at': '', 'detached_at': '', 'volume_id': '795bf84c-2544-4215-8a47-c7bd3b14f464', 'serial': '795bf84c-2544-4215-8a47-c7bd3b14f464'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1340.593134] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb99c94e-731a-406d-89cc-bae0a60dab0a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.612089] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c758d0f-4a63-493e-b7e5-f2c003d57323 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.637768] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] volume-795bf84c-2544-4215-8a47-c7bd3b14f464/volume-795bf84c-2544-4215-8a47-c7bd3b14f464.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1340.638122] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff0f2383-8b23-4ce1-9956-803a0de3bf39 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.656903] env[63538]: DEBUG oslo_vmware.api [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1340.656903] env[63538]: value = "task-5102058" [ 1340.656903] env[63538]: _type = "Task" [ 1340.656903] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.665377] env[63538]: DEBUG oslo_vmware.api [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102058, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.667941] env[63538]: INFO nova.compute.claims [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1341.038341] env[63538]: DEBUG nova.network.neutron [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Updating instance_info_cache with network_info: [{"id": "f27718d0-5d86-447d-ad7b-597b5af98ab8", "address": "fa:16:3e:3f:dd:e0", "network": {"id": "59211045-9e6b-4999-8ba5-54e53c962714", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1134659557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "329e471d86b9451b86b0b28b2824eae5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a06a63d6-2aeb-4084-8022-f804cac3fa74", "external-id": "nsx-vlan-transportzone-797", "segmentation_id": 797, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf27718d0-5d", "ovs_interfaceid": "f27718d0-5d86-447d-ad7b-597b5af98ab8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.167508] env[63538]: DEBUG oslo_vmware.api [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102058, 'name': ReconfigVM_Task, 'duration_secs': 0.36762} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.167739] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Reconfigured VM instance instance-0000007b to attach disk [datastore2] volume-795bf84c-2544-4215-8a47-c7bd3b14f464/volume-795bf84c-2544-4215-8a47-c7bd3b14f464.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1341.174655] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfedb633-9a67-44f6-a52b-19602462dda9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.191972] env[63538]: DEBUG oslo_vmware.api [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1341.191972] env[63538]: value = "task-5102059" [ 1341.191972] env[63538]: _type = "Task" [ 1341.191972] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.203217] env[63538]: DEBUG oslo_vmware.api [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102059, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.541032] env[63538]: DEBUG oslo_concurrency.lockutils [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Releasing lock "refresh_cache-40d2c269-449b-4b1e-9422-abcfb6543c11" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1341.541756] env[63538]: DEBUG nova.objects.instance [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lazy-loading 'flavor' on Instance uuid 40d2c269-449b-4b1e-9422-abcfb6543c11 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1341.703039] env[63538]: DEBUG oslo_vmware.api [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102059, 'name': ReconfigVM_Task, 'duration_secs': 0.151914} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.703509] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992569', 'volume_id': '795bf84c-2544-4215-8a47-c7bd3b14f464', 'name': 'volume-795bf84c-2544-4215-8a47-c7bd3b14f464', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '579c71bd-24f0-4257-856c-b24ddb2b9dba', 'attached_at': '', 'detached_at': '', 'volume_id': '795bf84c-2544-4215-8a47-c7bd3b14f464', 'serial': '795bf84c-2544-4215-8a47-c7bd3b14f464'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1341.752325] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085cf649-0c56-4c01-8c2e-fd4b196c33e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.760587] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c78b75-edc2-4c9c-af5a-df87926a35ad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.791709] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7cc992f-1680-4e28-bed0-1129ecf85781 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.799108] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c05e96-f89a-4301-9a9a-f8f73da4a741 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.812384] env[63538]: DEBUG nova.compute.provider_tree [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1342.047312] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa04ab00-a850-41f8-bbe5-2b133a86997d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.068101] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1342.068421] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8eb6287-f3a9-4647-8cd8-a448399fe395 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.074884] env[63538]: DEBUG oslo_vmware.api [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 
1342.074884] env[63538]: value = "task-5102060" [ 1342.074884] env[63538]: _type = "Task" [ 1342.074884] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.083137] env[63538]: DEBUG oslo_vmware.api [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.315761] env[63538]: DEBUG nova.scheduler.client.report [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1342.586149] env[63538]: DEBUG oslo_vmware.api [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102060, 'name': PowerOffVM_Task, 'duration_secs': 0.244136} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.586149] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1342.590762] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Reconfiguring VM instance instance-0000007d to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1342.591064] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c826b5b-b5ca-4489-ab84-2ee71e56a732 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.610503] env[63538]: DEBUG oslo_vmware.api [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1342.610503] env[63538]: value = "task-5102061" [ 1342.610503] env[63538]: _type = "Task" [ 1342.610503] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.618697] env[63538]: DEBUG oslo_vmware.api [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102061, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.747710] env[63538]: DEBUG nova.objects.instance [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lazy-loading 'flavor' on Instance uuid 579c71bd-24f0-4257-856c-b24ddb2b9dba {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1342.820844] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.160s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.849569] env[63538]: INFO nova.network.neutron [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updating port 5f276ada-dc8e-4558-a169-bfcaf25172e8 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1343.121114] env[63538]: DEBUG oslo_vmware.api [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102061, 'name': ReconfigVM_Task, 'duration_secs': 0.223538} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.121516] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Reconfigured VM instance instance-0000007d to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1343.121776] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1343.122076] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6fe46b0-8f14-403d-b8c3-ca32907c1447 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.129460] env[63538]: DEBUG oslo_vmware.api [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1343.129460] env[63538]: value = "task-5102062" [ 1343.129460] env[63538]: _type = "Task" [ 1343.129460] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.137823] env[63538]: DEBUG oslo_vmware.api [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102062, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.252578] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2859cd54-8b2e-4d0a-9744-e303b4570f1b tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.256s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.476790] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.477073] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.641575] env[63538]: DEBUG oslo_vmware.api [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102062, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.981155] env[63538]: INFO nova.compute.manager [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Detaching volume 795bf84c-2544-4215-8a47-c7bd3b14f464 [ 1344.030264] env[63538]: INFO nova.virt.block_device [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Attempting to driver detach volume 795bf84c-2544-4215-8a47-c7bd3b14f464 from mountpoint /dev/sdb [ 1344.030605] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Volume detach. 
Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1344.030817] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992569', 'volume_id': '795bf84c-2544-4215-8a47-c7bd3b14f464', 'name': 'volume-795bf84c-2544-4215-8a47-c7bd3b14f464', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '579c71bd-24f0-4257-856c-b24ddb2b9dba', 'attached_at': '', 'detached_at': '', 'volume_id': '795bf84c-2544-4215-8a47-c7bd3b14f464', 'serial': '795bf84c-2544-4215-8a47-c7bd3b14f464'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1344.031850] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3b7635-b5d2-412f-bae4-ea1aab0f835f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.055380] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642dcc67-fe94-4d5c-b915-7991f3cbd3dc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.063144] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba33f64-d405-49e1-a612-ae879f40196e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.084479] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b46b2b6-e1d1-4b87-903d-797774428f0d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.100702] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] The volume has not been displaced from its original location: [datastore2] volume-795bf84c-2544-4215-8a47-c7bd3b14f464/volume-795bf84c-2544-4215-8a47-c7bd3b14f464.vmdk. No consolidation needed. 
{{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1344.106132] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Reconfiguring VM instance instance-0000007b to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1344.106530] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5e09215-32d8-4206-a2d8-d2b4d8568a50 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.125030] env[63538]: DEBUG oslo_vmware.api [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1344.125030] env[63538]: value = "task-5102063" [ 1344.125030] env[63538]: _type = "Task" [ 1344.125030] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.137100] env[63538]: DEBUG oslo_vmware.api [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102063, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.145665] env[63538]: DEBUG oslo_vmware.api [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102062, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.285830] env[63538]: DEBUG nova.compute.manager [req-09d42f64-d6de-42dc-938a-d1df8516b8ba req-52cd932f-82bf-48b3-8ca7-f0d8b0ab9f3f service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Received event network-vif-plugged-5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1344.286105] env[63538]: DEBUG oslo_concurrency.lockutils [req-09d42f64-d6de-42dc-938a-d1df8516b8ba req-52cd932f-82bf-48b3-8ca7-f0d8b0ab9f3f service nova] Acquiring lock "d6215939-5e06-425d-b947-224eebb8386b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.286431] env[63538]: DEBUG oslo_concurrency.lockutils [req-09d42f64-d6de-42dc-938a-d1df8516b8ba req-52cd932f-82bf-48b3-8ca7-f0d8b0ab9f3f service nova] Lock "d6215939-5e06-425d-b947-224eebb8386b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.286565] env[63538]: DEBUG oslo_concurrency.lockutils [req-09d42f64-d6de-42dc-938a-d1df8516b8ba req-52cd932f-82bf-48b3-8ca7-f0d8b0ab9f3f service nova] Lock "d6215939-5e06-425d-b947-224eebb8386b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.286697] env[63538]: DEBUG nova.compute.manager [req-09d42f64-d6de-42dc-938a-d1df8516b8ba req-52cd932f-82bf-48b3-8ca7-f0d8b0ab9f3f service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] No waiting events found dispatching network-vif-plugged-5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1344.286895] env[63538]: WARNING nova.compute.manager [req-09d42f64-d6de-42dc-938a-d1df8516b8ba req-52cd932f-82bf-48b3-8ca7-f0d8b0ab9f3f service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Received unexpected event network-vif-plugged-5f276ada-dc8e-4558-a169-bfcaf25172e8 for instance with vm_state shelved_offloaded and task_state spawning. 
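[editor's note] The entries immediately above show the per-instance event-dispatch pattern: the service acquires a "<instance_uuid>-events" lock, tries to pop a waiter for the incoming network-vif-plugged event, releases the lock, and logs a WARNING because nothing was waiting (the instance is still shelved_offloaded/spawning). The following is a minimal, hypothetical sketch of that pattern only; the names _PENDING_EVENTS, pop_instance_event and external_instance_event are invented for illustration, and a plain threading.Lock stands in for the oslo_concurrency.lockutils helpers referenced in the log paths. It is not Nova's actual nova.compute.manager code.

# Illustrative sketch (assumptions noted above), Python 3.10
import logging
import threading

LOG = logging.getLogger(__name__)

# One lock per "<instance_uuid>-events" key, mirroring the lock names in the log.
_EVENT_LOCKS: dict[str, threading.Lock] = {}
# Waiters registered by code that is blocked until an external event arrives.
_PENDING_EVENTS: dict[str, dict[str, threading.Event]] = {}


def _event_lock(instance_uuid: str) -> threading.Lock:
    return _EVENT_LOCKS.setdefault(f"{instance_uuid}-events", threading.Lock())


def pop_instance_event(instance_uuid: str, event_name: str):
    """Return the waiter for this event if one is registered, else None."""
    with _event_lock(instance_uuid):          # "Acquiring lock ...-events"
        waiters = _PENDING_EVENTS.get(instance_uuid, {})
        return waiters.pop(event_name, None)  # lock "released" on exit


def external_instance_event(instance_uuid: str, vm_state: str, event_name: str):
    waiter = pop_instance_event(instance_uuid, event_name)
    if waiter is not None:
        waiter.set()                          # wake whoever was waiting
    else:
        # Corresponds to the WARNING above: nothing was waiting for this event.
        LOG.warning("Received unexpected event %s for instance with vm_state %s",
                    event_name, vm_state)

The design point the log illustrates is that the lock only guards the lookup/removal of the waiter, not the dispatch itself, which is why an event arriving before anyone registers a waiter is merely logged as unexpected rather than blocking the unshelve. [end editor's note]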
[ 1344.383181] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.383430] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.383625] env[63538]: DEBUG nova.network.neutron [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1344.635497] env[63538]: DEBUG oslo_vmware.api [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102063, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.645730] env[63538]: DEBUG oslo_vmware.api [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102062, 'name': PowerOnVM_Task, 'duration_secs': 1.025618} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.645990] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1344.646241] env[63538]: DEBUG nova.compute.manager [None req-a5cc3c45-3c17-42d4-a0a3-21434886704a tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1344.646977] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24582dc8-9742-463d-a79d-4a145198b953 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.137697] env[63538]: DEBUG oslo_vmware.api [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102063, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.340436] env[63538]: DEBUG nova.network.neutron [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updating instance_info_cache with network_info: [{"id": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "address": "fa:16:3e:59:b6:e8", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f276ada-dc", "ovs_interfaceid": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.637547] env[63538]: DEBUG oslo_vmware.api [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102063, 'name': ReconfigVM_Task, 'duration_secs': 1.242019} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.637844] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Reconfigured VM instance instance-0000007b to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1345.642744] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8c4cfb5-a4f2-4e08-8a09-d3e1fb9720e1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.658516] env[63538]: DEBUG oslo_vmware.api [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1345.658516] env[63538]: value = "task-5102064" [ 1345.658516] env[63538]: _type = "Task" [ 1345.658516] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.667571] env[63538]: DEBUG oslo_vmware.api [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102064, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.843413] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.871797] env[63538]: DEBUG nova.virt.hardware [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='57c0ef19f17ff76565424adc16617767',container_format='bare',created_at=2025-12-12T13:03:04Z,direct_url=,disk_format='vmdk',id=e467b0b6-bd80-444b-b4c5-e5bc6eaff630,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1399510510-shelved',owner='1a06b7cc1ab24ba584bbe970e4fc5e81',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-12-12T13:03:18Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1345.872074] env[63538]: DEBUG nova.virt.hardware [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1345.872454] env[63538]: DEBUG nova.virt.hardware [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1345.872454] env[63538]: DEBUG nova.virt.hardware [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1345.872596] env[63538]: DEBUG nova.virt.hardware [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1345.872755] env[63538]: DEBUG nova.virt.hardware [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1345.872958] env[63538]: DEBUG nova.virt.hardware [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1345.873685] env[63538]: DEBUG nova.virt.hardware [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1345.873685] env[63538]: DEBUG nova.virt.hardware [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1345.873685] env[63538]: DEBUG nova.virt.hardware [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1345.873685] env[63538]: DEBUG nova.virt.hardware [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1345.875087] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f894dc-f5b2-40d5-8e4f-12a8e1244ff5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.883841] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1667648f-1563-4efb-9bd8-4777c97763f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.901765] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:b6:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31264e2-3e0a-4dfb-ba1f-6389d7d47548', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f276ada-dc8e-4558-a169-bfcaf25172e8', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1345.907916] env[63538]: DEBUG oslo.service.loopingcall [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1345.908641] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6215939-5e06-425d-b947-224eebb8386b] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1345.908877] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8ed3d20-3d93-4a59-8e1a-b5b68b1d5c20 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.931021] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1345.931021] env[63538]: value = "task-5102065" [ 1345.931021] env[63538]: _type = "Task" [ 1345.931021] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.939238] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5102065, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.170678] env[63538]: DEBUG oslo_vmware.api [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102064, 'name': ReconfigVM_Task, 'duration_secs': 0.152592} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.170678] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992569', 'volume_id': '795bf84c-2544-4215-8a47-c7bd3b14f464', 'name': 'volume-795bf84c-2544-4215-8a47-c7bd3b14f464', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '579c71bd-24f0-4257-856c-b24ddb2b9dba', 'attached_at': '', 'detached_at': '', 'volume_id': '795bf84c-2544-4215-8a47-c7bd3b14f464', 'serial': '795bf84c-2544-4215-8a47-c7bd3b14f464'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1346.319889] env[63538]: DEBUG nova.compute.manager [req-8229ab44-baea-49b5-afd2-d1da779e83da req-b5d3b818-18c8-4e85-87b8-41184cdb8257 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Received event network-changed-5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1346.320112] env[63538]: DEBUG nova.compute.manager [req-8229ab44-baea-49b5-afd2-d1da779e83da req-b5d3b818-18c8-4e85-87b8-41184cdb8257 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Refreshing instance network info cache due to event network-changed-5f276ada-dc8e-4558-a169-bfcaf25172e8. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1346.320342] env[63538]: DEBUG oslo_concurrency.lockutils [req-8229ab44-baea-49b5-afd2-d1da779e83da req-b5d3b818-18c8-4e85-87b8-41184cdb8257 service nova] Acquiring lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.320492] env[63538]: DEBUG oslo_concurrency.lockutils [req-8229ab44-baea-49b5-afd2-d1da779e83da req-b5d3b818-18c8-4e85-87b8-41184cdb8257 service nova] Acquired lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.320662] env[63538]: DEBUG nova.network.neutron [req-8229ab44-baea-49b5-afd2-d1da779e83da req-b5d3b818-18c8-4e85-87b8-41184cdb8257 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Refreshing network info cache for port 5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1346.342028] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "40d2c269-449b-4b1e-9422-abcfb6543c11" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.342500] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "40d2c269-449b-4b1e-9422-abcfb6543c11" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.342840] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "40d2c269-449b-4b1e-9422-abcfb6543c11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.343162] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "40d2c269-449b-4b1e-9422-abcfb6543c11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.343483] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "40d2c269-449b-4b1e-9422-abcfb6543c11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.345899] env[63538]: INFO nova.compute.manager [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 
tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Terminating instance [ 1346.347732] env[63538]: DEBUG nova.compute.manager [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1346.347949] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1346.348856] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d620aa9-c5ad-48da-a120-94f7502d4627 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.356815] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1346.357073] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43a81208-a4e8-45be-bbc2-04cce39e8227 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.363367] env[63538]: DEBUG oslo_vmware.api [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1346.363367] env[63538]: value = "task-5102066" [ 1346.363367] env[63538]: _type = "Task" [ 1346.363367] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.371082] env[63538]: DEBUG oslo_vmware.api [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102066, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.441061] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5102065, 'name': CreateVM_Task} progress is 25%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.716563] env[63538]: DEBUG nova.objects.instance [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lazy-loading 'flavor' on Instance uuid 579c71bd-24f0-4257-856c-b24ddb2b9dba {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1346.873300] env[63538]: DEBUG oslo_vmware.api [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102066, 'name': PowerOffVM_Task, 'duration_secs': 0.195298} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.873583] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1346.873783] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1346.874054] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ada06bb6-3fd1-42bc-bd93-0e4b02ceb41f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.938336] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1346.938559] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1346.938743] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Deleting the datastore file [datastore2] 40d2c269-449b-4b1e-9422-abcfb6543c11 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1346.939705] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05ba770b-b399-4561-91e6-2a49ec11605e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.944803] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5102065, 'name': CreateVM_Task, 'duration_secs': 0.56347} completed successfully. 
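Once the power-off task finishes, the log shows the driver unregistering the VM and then deleting its directory on datastore2 with FileManager.DeleteDatastoreFile_Task. The sketch below condenses that teardown order under the same assumptions as the previous snippet (`session`, `vm_ref`), plus an assumed datacenter reference `dc_ref` and a datastore path string; the function name is hypothetical.

```python
# Hypothetical condensation of the teardown order visible in the log:
# (after power off) UnregisterVM -> DeleteDatastoreFile_Task on the
# instance directory. `session`, `vm_ref`, `dc_ref` and `ds_path` are
# assumed to come from the caller; error handling is trimmed.
def destroy_vm_files(session, vm_ref, dc_ref, ds_path):
    # UnregisterVM removes the VM from vCenter's inventory but leaves
    # its files on the datastore, so a file delete must follow.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # FileManager.DeleteDatastoreFile_Task then removes the instance
    # directory, e.g. "[datastore2] 40d2c269-449b-4b1e-9422-abcfb6543c11".
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name=ds_path,
                              datacenter=dc_ref)
    session.wait_for_task(task)
```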
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.945846] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6215939-5e06-425d-b947-224eebb8386b] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1346.946015] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e467b0b6-bd80-444b-b4c5-e5bc6eaff630" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.946200] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e467b0b6-bd80-444b-b4c5-e5bc6eaff630" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.946601] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e467b0b6-bd80-444b-b4c5-e5bc6eaff630" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1346.946866] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad76e80f-83e1-4671-889c-6cac9f2844bb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.949696] env[63538]: DEBUG oslo_vmware.api [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1346.949696] env[63538]: value = "task-5102068" [ 1346.949696] env[63538]: _type = "Task" [ 1346.949696] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.954076] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1346.954076] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5238a166-4d97-1eda-9dd3-0225f1e34b8c" [ 1346.954076] env[63538]: _type = "Task" [ 1346.954076] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.961768] env[63538]: DEBUG oslo_vmware.api [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102068, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.966819] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]5238a166-4d97-1eda-9dd3-0225f1e34b8c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.107277] env[63538]: DEBUG nova.network.neutron [req-8229ab44-baea-49b5-afd2-d1da779e83da req-b5d3b818-18c8-4e85-87b8-41184cdb8257 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updated VIF entry in instance network info cache for port 5f276ada-dc8e-4558-a169-bfcaf25172e8. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1347.107816] env[63538]: DEBUG nova.network.neutron [req-8229ab44-baea-49b5-afd2-d1da779e83da req-b5d3b818-18c8-4e85-87b8-41184cdb8257 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updating instance_info_cache with network_info: [{"id": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "address": "fa:16:3e:59:b6:e8", "network": {"id": "690e6150-6e85-472d-baeb-85e69afd037b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1831468396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a06b7cc1ab24ba584bbe970e4fc5e81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f276ada-dc", "ovs_interfaceid": "5f276ada-dc8e-4558-a169-bfcaf25172e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.464712] env[63538]: DEBUG oslo_vmware.api [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102068, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157673} completed successfully. 
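The VIF entry above is stored as JSON in the instance_info_cache; the pieces usually of interest when reading such a cache dump are the fixed IP, any floating IPs mapped onto it, and the MTU. A small self-contained example of pulling those out of a structure shaped like the logged entry (the literal below is abridged from the entry above):

```python
import json

# Abridged copy of the cached VIF entry logged above; only the fields
# read below are kept.
cached_vif = json.loads('''
{
  "id": "5f276ada-dc8e-4558-a169-bfcaf25172e8",
  "address": "fa:16:3e:59:b6:e8",
  "network": {
    "subnets": [
      {
        "cidr": "192.168.128.0/28",
        "ips": [
          {
            "address": "192.168.128.7",
            "type": "fixed",
            "floating_ips": [{"address": "10.180.180.169", "type": "floating"}]
          }
        ]
      }
    ],
    "meta": {"mtu": 8950}
  }
}
''')

for subnet in cached_vif['network']['subnets']:
    for ip in subnet['ips']:
        floats = [f['address'] for f in ip.get('floating_ips', [])]
        print(ip['address'], 'floating:', floats)   # 192.168.128.7 floating: ['10.180.180.169']
print('mtu:', cached_vif['network']['meta']['mtu'])  # mtu: 8950
```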
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.468164] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1347.468381] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1347.468567] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1347.468749] env[63538]: INFO nova.compute.manager [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1347.468987] env[63538]: DEBUG oslo.service.loopingcall [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1347.469278] env[63538]: DEBUG nova.compute.manager [-] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1347.469380] env[63538]: DEBUG nova.network.neutron [-] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1347.471076] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e467b0b6-bd80-444b-b4c5-e5bc6eaff630" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.471313] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Processing image e467b0b6-bd80-444b-b4c5-e5bc6eaff630 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1347.471540] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e467b0b6-bd80-444b-b4c5-e5bc6eaff630/e467b0b6-bd80-444b-b4c5-e5bc6eaff630.vmdk" {{(pid=63538) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.471687] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e467b0b6-bd80-444b-b4c5-e5bc6eaff630/e467b0b6-bd80-444b-b4c5-e5bc6eaff630.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.471862] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1347.472112] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75a8c879-86b3-4dc0-b391-cf7565f639e4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.481203] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1347.481402] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1347.482091] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b1e1413-e0a3-4866-b168-94d52d201736 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.487195] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1347.487195] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ab34d2-2d13-20be-28e5-4da25c49eb75" [ 1347.487195] env[63538]: _type = "Task" [ 1347.487195] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.494911] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ab34d2-2d13-20be-28e5-4da25c49eb75, 'name': SearchDatastore_Task} progress is 0%. 
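The "Creating directory ... devstack-image-cache_base" / "Folder ... created" pair above is the driver making sure the image-cache directory exists before it searches the datastore for a cached copy of the image. A sketch of that idempotent mkdir, assuming the same `session`/`dc_ref` objects as in the earlier snippets; the helper name is made up, and the exception class is the one oslo_vmware raises for an already-existing path.

```python
from oslo_vmware import exceptions as vexc


def ensure_cache_dir(session, dc_ref, ds_path):
    """Create e.g. "[datastore2] devstack-image-cache_base" if missing.

    FileManager.MakeDirectory fails with a file-already-exists fault
    when the folder is already there; treating that as success keeps
    concurrent builds from racing each other.
    """
    file_manager = session.vim.service_content.fileManager
    try:
        session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                           name=ds_path,
                           datacenter=dc_ref,
                           createParentDirectories=True)
    except vexc.FileAlreadyExistsException:
        # Another request created it first; nothing to do.
        pass
```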
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.610558] env[63538]: DEBUG oslo_concurrency.lockutils [req-8229ab44-baea-49b5-afd2-d1da779e83da req-b5d3b818-18c8-4e85-87b8-41184cdb8257 service nova] Releasing lock "refresh_cache-d6215939-5e06-425d-b947-224eebb8386b" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.723534] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fbd2d96b-6b3d-46aa-93c5-355831fe58b1 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.246s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.997505] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Preparing fetch location {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1347.997806] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Fetch image to [datastore2] OSTACK_IMG_0f4d6467-80f7-45ec-9dc4-b14a1f62254d/OSTACK_IMG_0f4d6467-80f7-45ec-9dc4-b14a1f62254d.vmdk {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1347.998024] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Downloading stream optimized image e467b0b6-bd80-444b-b4c5-e5bc6eaff630 to [datastore2] OSTACK_IMG_0f4d6467-80f7-45ec-9dc4-b14a1f62254d/OSTACK_IMG_0f4d6467-80f7-45ec-9dc4-b14a1f62254d.vmdk on the data store datastore2 as vApp {{(pid=63538) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1347.998246] env[63538]: DEBUG nova.virt.vmwareapi.images [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Downloading image file data e467b0b6-bd80-444b-b4c5-e5bc6eaff630 to the ESX as VM named 'OSTACK_IMG_0f4d6467-80f7-45ec-9dc4-b14a1f62254d' {{(pid=63538) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1348.074726] env[63538]: DEBUG oslo_vmware.rw_handles [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1348.074726] env[63538]: value = "resgroup-9" [ 1348.074726] env[63538]: _type = "ResourcePool" [ 1348.074726] env[63538]: }. 
{{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1348.075119] env[63538]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-3ce4a81e-5b3c-4774-8461-40c4044f7cad {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.097279] env[63538]: DEBUG oslo_vmware.rw_handles [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lease: (returnval){ [ 1348.097279] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522e6af7-cd14-111c-4ca5-e999373eaba3" [ 1348.097279] env[63538]: _type = "HttpNfcLease" [ 1348.097279] env[63538]: } obtained for vApp import into resource pool (val){ [ 1348.097279] env[63538]: value = "resgroup-9" [ 1348.097279] env[63538]: _type = "ResourcePool" [ 1348.097279] env[63538]: }. {{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1348.097548] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the lease: (returnval){ [ 1348.097548] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522e6af7-cd14-111c-4ca5-e999373eaba3" [ 1348.097548] env[63538]: _type = "HttpNfcLease" [ 1348.097548] env[63538]: } to be ready. {{(pid=63538) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1348.104288] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1348.104288] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522e6af7-cd14-111c-4ca5-e999373eaba3" [ 1348.104288] env[63538]: _type = "HttpNfcLease" [ 1348.104288] env[63538]: } is initializing. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1348.190875] env[63538]: DEBUG nova.network.neutron [-] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.345958] env[63538]: DEBUG nova.compute.manager [req-870862b4-bd25-4828-885e-a603e69e2623 req-c204024f-ce39-4edc-acc8-ee0797ec55e5 service nova] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Received event network-vif-deleted-f27718d0-5d86-447d-ad7b-597b5af98ab8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1348.605633] env[63538]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1348.605633] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522e6af7-cd14-111c-4ca5-e999373eaba3" [ 1348.605633] env[63538]: _type = "HttpNfcLease" [ 1348.605633] env[63538]: } is ready. {{(pid=63538) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1348.606113] env[63538]: DEBUG oslo_vmware.rw_handles [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1348.606113] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]522e6af7-cd14-111c-4ca5-e999373eaba3" [ 1348.606113] env[63538]: _type = "HttpNfcLease" [ 1348.606113] env[63538]: }. 
{{(pid=63538) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1348.606617] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3401646c-72c5-4864-9821-87d9abd3eccb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.613871] env[63538]: DEBUG oslo_vmware.rw_handles [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521a040a-1d9b-2a24-2a61-8d443cbb0ac8/disk-0.vmdk from lease info. {{(pid=63538) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1348.614081] env[63538]: DEBUG oslo_vmware.rw_handles [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521a040a-1d9b-2a24-2a61-8d443cbb0ac8/disk-0.vmdk. {{(pid=63538) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1348.676452] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a50bb5c1-63d6-469f-9407-9e901bf5e1c1 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.693283] env[63538]: INFO nova.compute.manager [-] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Took 1.22 seconds to deallocate network for instance. [ 1348.808634] env[63538]: DEBUG oslo_concurrency.lockutils [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.808931] env[63538]: DEBUG oslo_concurrency.lockutils [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.809238] env[63538]: DEBUG oslo_concurrency.lockutils [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "579c71bd-24f0-4257-856c-b24ddb2b9dba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.809450] env[63538]: DEBUG oslo_concurrency.lockutils [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.809664] env[63538]: DEBUG oslo_concurrency.lockutils [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.811963] env[63538]: INFO nova.compute.manager [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Terminating instance [ 1348.813884] env[63538]: DEBUG nova.compute.manager [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Start destroying the instance on the hypervisor. {{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1348.814099] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1348.815014] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52ab9e0-f4ca-4bb9-990b-869466428f95 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.824983] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1348.825264] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23a6261c-2075-4d3f-bb0e-5a4219fb7a83 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.832653] env[63538]: DEBUG oslo_vmware.api [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1348.832653] env[63538]: value = "task-5102070" [ 1348.832653] env[63538]: _type = "Task" [ 1348.832653] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.841829] env[63538]: DEBUG oslo_vmware.api [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102070, 'name': PowerOffVM_Task} progress is 0%. 
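Throughout these entries the compute manager serializes work with oslo_concurrency.lockutils named locks: one per instance UUID around terminate, a "<uuid>-events" lock around the pending-event table, "refresh_cache-<uuid>" around the network info cache, and a shared "compute_resources" lock in the resource tracker. The "Acquiring/acquired/released ... waited/held Ns" lines are emitted by those lock wrappers. A minimal illustration of the same pattern, with a made-up critical section rather than Nova's real methods:

```python
from oslo_concurrency import lockutils

INSTANCE_UUID = '579c71bd-24f0-4257-856c-b24ddb2b9dba'  # example value


def clear_events(instance_uuid):
    # Same idea as the "<uuid>-events" lock in the log: only one thread
    # at a time may mutate the queued external events for this instance.
    with lockutils.lock(f'{instance_uuid}-events'):
        pass  # ... drop queued external events here ...


def terminate(instance_uuid):
    # Per-instance lock so two terminate requests (or a terminate and a
    # rebuild) cannot interleave on the same instance.
    with lockutils.lock(instance_uuid):
        clear_events(instance_uuid)
        # ... power off, unregister, delete files, deallocate network ...


terminate(INSTANCE_UUID)
```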
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.200311] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.200687] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.200938] env[63538]: DEBUG nova.objects.instance [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lazy-loading 'resources' on Instance uuid 40d2c269-449b-4b1e-9422-abcfb6543c11 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1349.345859] env[63538]: DEBUG oslo_vmware.api [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102070, 'name': PowerOffVM_Task, 'duration_secs': 0.196915} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.348890] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1349.349146] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1349.349473] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-469f8ef8-a515-464e-b638-ae97bdb2991e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.430755] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1349.431076] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1349.431289] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d 
tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Deleting the datastore file [datastore1] 579c71bd-24f0-4257-856c-b24ddb2b9dba {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1349.431583] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25925cb4-e925-4803-860c-4507ca339061 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.447229] env[63538]: DEBUG oslo_vmware.api [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1349.447229] env[63538]: value = "task-5102072" [ 1349.447229] env[63538]: _type = "Task" [ 1349.447229] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.456390] env[63538]: DEBUG oslo_vmware.api [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102072, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.775311] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e36874be-2dd3-455e-92b5-546921116cfe {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.783889] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990929b3-bb42-464a-8b97-7f2a2c8ab5f0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.818733] env[63538]: DEBUG oslo_vmware.rw_handles [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Completed reading data from the image iterator. {{(pid=63538) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1349.818930] env[63538]: DEBUG oslo_vmware.rw_handles [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521a040a-1d9b-2a24-2a61-8d443cbb0ac8/disk-0.vmdk. 
{{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1349.819827] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c698fb8-5d4f-40d7-9db3-1f02231f0854 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.822973] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f44571-1617-41a4-a291-b4ba9e02af6a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.832369] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895453ac-b15f-4bd9-92f5-e5e93522f85d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.836255] env[63538]: DEBUG oslo_vmware.rw_handles [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521a040a-1d9b-2a24-2a61-8d443cbb0ac8/disk-0.vmdk is in state: ready. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1349.836425] env[63538]: DEBUG oslo_vmware.rw_handles [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521a040a-1d9b-2a24-2a61-8d443cbb0ac8/disk-0.vmdk. {{(pid=63538) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1349.836661] env[63538]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-7a88f522-b801-4b98-ac9b-25e111a94df6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.848726] env[63538]: DEBUG nova.compute.provider_tree [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1349.959203] env[63538]: DEBUG oslo_vmware.api [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102072, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168024} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.959544] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1349.959757] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1349.959963] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1349.960180] env[63538]: INFO nova.compute.manager [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1349.960473] env[63538]: DEBUG oslo.service.loopingcall [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1349.960692] env[63538]: DEBUG nova.compute.manager [-] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1349.960793] env[63538]: DEBUG nova.network.neutron [-] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1350.041929] env[63538]: DEBUG oslo_vmware.rw_handles [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521a040a-1d9b-2a24-2a61-8d443cbb0ac8/disk-0.vmdk. 
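The entries from roughly [1348.074] to [1350.041] trace the stream-optimized image download: ResourcePool.ImportVApp returns an HttpNfcLease, the driver waits for it to become ready, reads the lease info to find the disk-0.vmdk upload URL, streams the image bytes to that URL (ticking HttpNfcLeaseProgress along the way), and finally calls HttpNfcLeaseComplete. The sketch below compresses that lifecycle; it assumes `session`, a prepared import spec, folder and resource-pool references, and a `write_image_bytes(url)` callable that performs the actual HTTPS upload. In the real driver the upload and lease bookkeeping live in oslo_vmware's rw_handles/image_transfer helpers, so treat this as an outline, not their API.

```python
from oslo_vmware import vim_util


def import_image_as_vapp(session, rp_ref, folder_ref, import_spec,
                         write_image_bytes):
    """Sketch of the HttpNfcLease lifecycle seen in the log."""
    # 1. Ask vCenter for an import lease on the resource pool.
    lease = session.invoke_api(session.vim, 'ImportVApp', rp_ref,
                               spec=import_spec, folder=folder_ref)

    # 2. Block until the lease leaves "initializing" and becomes "ready".
    session.wait_for_lease_ready(lease)

    # 3. The lease info carries the device URLs; the vmdk one is the
    #    HTTPS endpoint the image bytes are written to.
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    vmdk_url = next(dev.url for dev in lease_info.deviceUrl
                    if dev.url.endswith('.vmdk'))

    # 4. Stream the image; the real write handle also updates
    #    HttpNfcLeaseProgress periodically so the lease does not expire.
    write_image_bytes(vmdk_url)

    # 5. Tell vCenter the transfer is done so it finalizes the VM.
    session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
```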
{{(pid=63538) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1350.042192] env[63538]: INFO nova.virt.vmwareapi.images [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Downloaded image file data e467b0b6-bd80-444b-b4c5-e5bc6eaff630 [ 1350.043039] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77445a8-716a-46b5-aea0-fe9f60848608 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.059681] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9739f3ca-6faa-4b05-85e8-ba8a17e7d042 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.084192] env[63538]: INFO nova.virt.vmwareapi.images [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] The imported VM was unregistered [ 1350.087406] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Caching image {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1350.087692] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Creating directory with path [datastore2] devstack-image-cache_base/e467b0b6-bd80-444b-b4c5-e5bc6eaff630 {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1350.088046] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b92c797-ebe1-4724-9047-f20d7369ee42 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.121703] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Created directory with path [datastore2] devstack-image-cache_base/e467b0b6-bd80-444b-b4c5-e5bc6eaff630 {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1350.121915] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_0f4d6467-80f7-45ec-9dc4-b14a1f62254d/OSTACK_IMG_0f4d6467-80f7-45ec-9dc4-b14a1f62254d.vmdk to [datastore2] devstack-image-cache_base/e467b0b6-bd80-444b-b4c5-e5bc6eaff630/e467b0b6-bd80-444b-b4c5-e5bc6eaff630.vmdk. 
{{(pid=63538) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1350.122248] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c218f86e-3e02-424a-aacd-99687f3e6770 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.130290] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1350.130290] env[63538]: value = "task-5102074" [ 1350.130290] env[63538]: _type = "Task" [ 1350.130290] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.138702] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102074, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.352264] env[63538]: DEBUG nova.scheduler.client.report [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1350.479741] env[63538]: DEBUG nova.compute.manager [req-4d337e37-f80e-4ab2-8c91-e0bc15fce54e req-9986c81d-4d12-43f5-8b6e-fe3c9c8da55e service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Received event network-vif-deleted-8e02ffa4-5076-4042-a6e3-4a7142802a93 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1350.479820] env[63538]: INFO nova.compute.manager [req-4d337e37-f80e-4ab2-8c91-e0bc15fce54e req-9986c81d-4d12-43f5-8b6e-fe3c9c8da55e service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Neutron deleted interface 8e02ffa4-5076-4042-a6e3-4a7142802a93; detaching it from the instance and deleting it from the info cache [ 1350.479997] env[63538]: DEBUG nova.network.neutron [req-4d337e37-f80e-4ab2-8c91-e0bc15fce54e req-9986c81d-4d12-43f5-8b6e-fe3c9c8da55e service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.641739] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102074, 'name': MoveVirtualDisk_Task} progress is 12%. 
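The inventory dictionary reported for provider f65218a4-1d3d-476a-9093-01cae92c8635 is what placement uses for admission control: the schedulable capacity of each resource class is (total - reserved) * allocation_ratio, and max_unit caps what a single instance may request. A quick check with the values logged above:

```python
# Values copied from the inventory dict logged for provider
# f65218a4-1d3d-476a-9093-01cae92c8635.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 95},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable={capacity:g}, per-instance cap={inv['max_unit']}")
# VCPU: schedulable=192, per-instance cap=16
# MEMORY_MB: schedulable=196078, per-instance cap=65530
# DISK_GB: schedulable=200, per-instance cap=95
```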
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.857285] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.656s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.881476] env[63538]: INFO nova.scheduler.client.report [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Deleted allocations for instance 40d2c269-449b-4b1e-9422-abcfb6543c11 [ 1350.956124] env[63538]: DEBUG nova.network.neutron [-] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.983029] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7eb0f53e-5802-4278-bd6a-c7e517f7735b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.994306] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523877b9-3468-44ab-a39f-cccae35ca194 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.030515] env[63538]: DEBUG nova.compute.manager [req-4d337e37-f80e-4ab2-8c91-e0bc15fce54e req-9986c81d-4d12-43f5-8b6e-fe3c9c8da55e service nova] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Detach interface failed, port_id=8e02ffa4-5076-4042-a6e3-4a7142802a93, reason: Instance 579c71bd-24f0-4257-856c-b24ddb2b9dba could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1351.141585] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102074, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.390958] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b3a403ec-19b8-4794-bf63-9d52b2bc0f79 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "40d2c269-449b-4b1e-9422-abcfb6543c11" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.048s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.459402] env[63538]: INFO nova.compute.manager [-] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Took 1.50 seconds to deallocate network for instance. [ 1351.641951] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102074, 'name': MoveVirtualDisk_Task} progress is 54%. 
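The network-vif-deleted events above are Neutron telling Nova, via the external-events API, that a port is gone; the compute manager reacts by trying to detach the interface and pruning the port from the cached network info, and logs "could not be found" when the instance has already been deleted, as happens here for 579c71bd. Below is a purely illustrative, framework-free sketch of that cache-pruning step; the data shape mirrors the cached VIF entries shown earlier and none of it is Nova's actual code.

```python
def prune_deleted_vif(network_info, deleted_port_id):
    """Drop the VIF for a deleted Neutron port from a cached list.

    `network_info` is a list of dicts shaped like the instance_info_cache
    entries in this log (each with an "id" key holding the port UUID).
    Returns the filtered list; a caller would persist it back to the
    cache, mirroring "deleting it from the info cache" above.
    """
    return [vif for vif in network_info if vif['id'] != deleted_port_id]


# Example: after network-vif-deleted-8e02ffa4-..., the cache for
# 579c71bd-24f0-4257-856c-b24ddb2b9dba ends up empty ("network_info: []").
cache = [{'id': '8e02ffa4-5076-4042-a6e3-4a7142802a93', 'address': 'fa:16:3e:00:00:01'}]
print(prune_deleted_vif(cache, '8e02ffa4-5076-4042-a6e3-4a7142802a93'))  # -> []
```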
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.919701] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "2f7bc37b-36c6-404a-82a9-c2b0d4a72439" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.920108] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "2f7bc37b-36c6-404a-82a9-c2b0d4a72439" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.920595] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "2f7bc37b-36c6-404a-82a9-c2b0d4a72439-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.920954] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "2f7bc37b-36c6-404a-82a9-c2b0d4a72439-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.921207] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "2f7bc37b-36c6-404a-82a9-c2b0d4a72439-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.923973] env[63538]: INFO nova.compute.manager [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Terminating instance [ 1351.926342] env[63538]: DEBUG nova.compute.manager [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1351.926616] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1351.927849] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdc36b3-92a6-4a78-8b82-821252dd479c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.936720] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1351.937417] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e039c06d-2036-4804-b3f2-edb2b6aefb70 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.945651] env[63538]: DEBUG oslo_vmware.api [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1351.945651] env[63538]: value = "task-5102075" [ 1351.945651] env[63538]: _type = "Task" [ 1351.945651] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.955057] env[63538]: DEBUG oslo_vmware.api [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102075, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.966624] env[63538]: DEBUG oslo_concurrency.lockutils [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.966886] env[63538]: DEBUG oslo_concurrency.lockutils [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.967131] env[63538]: DEBUG nova.objects.instance [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lazy-loading 'resources' on Instance uuid 579c71bd-24f0-4257-856c-b24ddb2b9dba {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1352.144613] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102074, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.457175] env[63538]: DEBUG oslo_vmware.api [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102075, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.526662] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1e8f82-b2e7-4fcc-af62-da92b75c8193 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.535200] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211d8615-076b-4c4b-a9dc-ab8737919d78 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.568036] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acd5b3d-a281-49ef-8ae7-4247c942bdd8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.576638] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f89eec-ae6f-4e8a-b70d-a128ac9ec4b9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.591908] env[63538]: DEBUG nova.compute.provider_tree [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1352.643383] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102074, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.956865] env[63538]: DEBUG oslo_vmware.api [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102075, 'name': PowerOffVM_Task, 'duration_secs': 0.726836} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.957249] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1352.957288] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1352.957541] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98bee9fb-8e17-4efd-ac56-534990019627 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.035672] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1353.035933] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1353.036106] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Deleting the datastore file [datastore1] 2f7bc37b-36c6-404a-82a9-c2b0d4a72439 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1353.036391] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6ee48df-641e-4603-8d53-66c0816b9c65 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.045630] env[63538]: DEBUG oslo_vmware.api [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for the task: (returnval){ [ 1353.045630] env[63538]: value = "task-5102077" [ 1353.045630] env[63538]: _type = "Task" [ 1353.045630] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.054377] env[63538]: DEBUG oslo_vmware.api [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102077, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.096631] env[63538]: DEBUG nova.scheduler.client.report [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1353.142875] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102074, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.60754} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.143165] env[63538]: INFO nova.virt.vmwareapi.ds_util [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_0f4d6467-80f7-45ec-9dc4-b14a1f62254d/OSTACK_IMG_0f4d6467-80f7-45ec-9dc4-b14a1f62254d.vmdk to [datastore2] devstack-image-cache_base/e467b0b6-bd80-444b-b4c5-e5bc6eaff630/e467b0b6-bd80-444b-b4c5-e5bc6eaff630.vmdk. [ 1353.143367] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Cleaning up location [datastore2] OSTACK_IMG_0f4d6467-80f7-45ec-9dc4-b14a1f62254d {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1353.143535] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_0f4d6467-80f7-45ec-9dc4-b14a1f62254d {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1353.143795] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-564bad2b-67fb-4777-a025-86c15365a2d0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.150626] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1353.150626] env[63538]: value = "task-5102078" [ 1353.150626] env[63538]: _type = "Task" [ 1353.150626] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.158857] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102078, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.555678] env[63538]: DEBUG oslo_vmware.api [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102077, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.601776] env[63538]: DEBUG oslo_concurrency.lockutils [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.635s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.620501] env[63538]: INFO nova.scheduler.client.report [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Deleted allocations for instance 579c71bd-24f0-4257-856c-b24ddb2b9dba [ 1353.662052] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102078, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.310893} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.662365] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1353.662544] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e467b0b6-bd80-444b-b4c5-e5bc6eaff630/e467b0b6-bd80-444b-b4c5-e5bc6eaff630.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.662826] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e467b0b6-bd80-444b-b4c5-e5bc6eaff630/e467b0b6-bd80-444b-b4c5-e5bc6eaff630.vmdk to [datastore2] d6215939-5e06-425d-b947-224eebb8386b/d6215939-5e06-425d-b947-224eebb8386b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1353.663131] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-346fd3e5-3352-43d3-9182-c42717238790 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.669948] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1353.669948] env[63538]: value = "task-5102079" [ 1353.669948] env[63538]: _type = "Task" [ 1353.669948] env[63538]: } to complete. 
{{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.678114] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102079, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.057935] env[63538]: DEBUG oslo_vmware.api [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Task: {'id': task-5102077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.694949} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.058290] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1354.058461] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1354.058653] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1354.058836] env[63538]: INFO nova.compute.manager [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Took 2.13 seconds to destroy the instance on the hypervisor. [ 1354.059103] env[63538]: DEBUG oslo.service.loopingcall [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1354.059321] env[63538]: DEBUG nova.compute.manager [-] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1354.059420] env[63538]: DEBUG nova.network.neutron [-] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1354.127427] env[63538]: DEBUG oslo_concurrency.lockutils [None req-05ef4871-0b54-4560-a2f0-9e22aa2b449d tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "579c71bd-24f0-4257-856c-b24ddb2b9dba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.318s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1354.181236] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102079, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.406288] env[63538]: DEBUG nova.compute.manager [req-0557db9a-16ad-4f1b-ad2e-39aee8458f8c req-929bcb71-abbc-41d1-a3cc-0f9ce2e65ff1 service nova] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Received event network-vif-deleted-eb5819a0-9549-4a1a-af36-c661faf9b44f {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1354.406547] env[63538]: INFO nova.compute.manager [req-0557db9a-16ad-4f1b-ad2e-39aee8458f8c req-929bcb71-abbc-41d1-a3cc-0f9ce2e65ff1 service nova] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Neutron deleted interface eb5819a0-9549-4a1a-af36-c661faf9b44f; detaching it from the instance and deleting it from the info cache [ 1354.406691] env[63538]: DEBUG nova.network.neutron [req-0557db9a-16ad-4f1b-ad2e-39aee8458f8c req-929bcb71-abbc-41d1-a3cc-0f9ce2e65ff1 service nova] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.681471] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102079, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.874224] env[63538]: DEBUG nova.network.neutron [-] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.910332] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a24913c0-f25b-4177-8ac2-718e44b12af6 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.922584] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd07173-856b-4337-a96f-b83952ced788 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.954181] env[63538]: DEBUG nova.compute.manager [req-0557db9a-16ad-4f1b-ad2e-39aee8458f8c req-929bcb71-abbc-41d1-a3cc-0f9ce2e65ff1 service nova] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Detach interface failed, port_id=eb5819a0-9549-4a1a-af36-c661faf9b44f, reason: Instance 2f7bc37b-36c6-404a-82a9-c2b0d4a72439 could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1355.181993] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102079, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.376913] env[63538]: INFO nova.compute.manager [-] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Took 1.32 seconds to deallocate network for instance. [ 1355.682243] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102079, 'name': CopyVirtualDisk_Task} progress is 85%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.884542] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.884908] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.885206] env[63538]: DEBUG nova.objects.instance [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lazy-loading 'resources' on Instance uuid 2f7bc37b-36c6-404a-82a9-c2b0d4a72439 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1356.185404] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102079, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.33144} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.186834] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e467b0b6-bd80-444b-b4c5-e5bc6eaff630/e467b0b6-bd80-444b-b4c5-e5bc6eaff630.vmdk to [datastore2] d6215939-5e06-425d-b947-224eebb8386b/d6215939-5e06-425d-b947-224eebb8386b.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1356.189958] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f65c2319-7c67-4959-9847-3034d5f2dcfc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.193038] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "46527669-5c77-4131-a6f5-2f943f9c7347" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.193270] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "46527669-5c77-4131-a6f5-2f943f9c7347" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.214570] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 
tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] d6215939-5e06-425d-b947-224eebb8386b/d6215939-5e06-425d-b947-224eebb8386b.vmdk or device None with type streamOptimized {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1356.215581] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54263a04-60e7-433a-8411-63ce5a03b02d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.235922] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1356.235922] env[63538]: value = "task-5102080" [ 1356.235922] env[63538]: _type = "Task" [ 1356.235922] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.244490] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102080, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.438065] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac06a0f2-5711-4c1c-a2ca-88366e539754 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.445952] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251b8e45-2e82-4f6d-817f-a28f8cca0a47 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.475705] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c6bfc5-ac11-4091-96a2-3d9959f727d3 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.483086] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6fdef84-3b76-4722-87bf-883b636f6c1a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.500788] env[63538]: DEBUG nova.compute.provider_tree [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1356.695489] env[63538]: DEBUG nova.compute.manager [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Starting instance... 
{{(pid=63538) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1356.745986] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102080, 'name': ReconfigVM_Task, 'duration_secs': 0.306955} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.746244] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Reconfigured VM instance instance-0000007a to attach disk [datastore2] d6215939-5e06-425d-b947-224eebb8386b/d6215939-5e06-425d-b947-224eebb8386b.vmdk or device None with type streamOptimized {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1356.746870] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a997e49-7c8d-488a-88ee-2b17a5f42581 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.752686] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1356.752686] env[63538]: value = "task-5102081" [ 1356.752686] env[63538]: _type = "Task" [ 1356.752686] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.760045] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102081, 'name': Rename_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.004331] env[63538]: DEBUG nova.scheduler.client.report [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1357.217970] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.263815] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102081, 'name': Rename_Task, 'duration_secs': 0.147973} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.264087] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1357.264344] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a6218b6-ae3a-44cd-b050-6139c92c33e8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.271376] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1357.271376] env[63538]: value = "task-5102082" [ 1357.271376] env[63538]: _type = "Task" [ 1357.271376] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.279836] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102082, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.509431] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.624s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.512079] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.294s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.513775] env[63538]: INFO nova.compute.claims [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1357.529537] env[63538]: INFO nova.scheduler.client.report [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Deleted allocations for instance 2f7bc37b-36c6-404a-82a9-c2b0d4a72439 [ 1357.782744] env[63538]: DEBUG oslo_vmware.api [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102082, 'name': PowerOnVM_Task, 'duration_secs': 0.43356} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.782986] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1357.890902] env[63538]: DEBUG nova.compute.manager [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1357.891890] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4567116-d3c2-45b9-b18b-7c691259d6d7 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.037566] env[63538]: DEBUG oslo_concurrency.lockutils [None req-2a2b60e1-4f6a-4133-999c-574026aca9e1 tempest-ServerRescueTestJSON-355456121 tempest-ServerRescueTestJSON-355456121-project-member] Lock "2f7bc37b-36c6-404a-82a9-c2b0d4a72439" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.117s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1358.411169] env[63538]: DEBUG oslo_concurrency.lockutils [None req-c1ed6ded-07d3-4860-b58b-0c92e04b5a18 tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "d6215939-5e06-425d-b947-224eebb8386b" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.779s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1358.566629] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97a6f2c-19b9-4df2-9d67-7e48aef5e06b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.575941] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657cf6e1-f85f-4b97-8400-4f65e66ef177 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.607620] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4413966f-01b9-45dc-9eea-35b285b75bcb {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.619100] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73562a4-2b5c-4f04-bafb-446189445f2a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.634152] env[63538]: DEBUG nova.compute.provider_tree [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1359.113964] env[63538]: DEBUG oslo_concurrency.lockutils [None 
req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "d6215939-5e06-425d-b947-224eebb8386b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.114293] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "d6215939-5e06-425d-b947-224eebb8386b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.114546] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "d6215939-5e06-425d-b947-224eebb8386b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.114717] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "d6215939-5e06-425d-b947-224eebb8386b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.114896] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "d6215939-5e06-425d-b947-224eebb8386b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.117084] env[63538]: INFO nova.compute.manager [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Terminating instance [ 1359.118899] env[63538]: DEBUG nova.compute.manager [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1359.119111] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1359.120013] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84545bee-cd86-440e-ac58-49d8eec1d0e4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.129237] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1359.129596] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3cb9996-71e2-4d55-9c58-e827ba280018 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.137784] env[63538]: DEBUG nova.scheduler.client.report [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1359.141306] env[63538]: DEBUG oslo_vmware.api [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1359.141306] env[63538]: value = "task-5102083" [ 1359.141306] env[63538]: _type = "Task" [ 1359.141306] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.150634] env[63538]: DEBUG oslo_vmware.api [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102083, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.643512] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.131s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.644060] env[63538]: DEBUG nova.compute.manager [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Start building networks asynchronously for instance. {{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1359.658589] env[63538]: DEBUG oslo_vmware.api [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102083, 'name': PowerOffVM_Task, 'duration_secs': 0.188659} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.658865] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1359.659058] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1359.659381] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ba14b2d-e220-4960-9375-1f8a38bbe2af {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.722162] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1359.722414] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Deleting contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1359.722573] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleting the datastore file [datastore2] d6215939-5e06-425d-b947-224eebb8386b {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1359.722847] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-ded52354-2d5a-4ee9-887d-b23c0c720db0 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.729731] env[63538]: DEBUG oslo_vmware.api [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for the task: (returnval){ [ 1359.729731] env[63538]: value = "task-5102085" [ 1359.729731] env[63538]: _type = "Task" [ 1359.729731] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.738724] env[63538]: DEBUG oslo_vmware.api [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102085, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.155220] env[63538]: DEBUG nova.compute.utils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1360.156733] env[63538]: DEBUG nova.compute.manager [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Allocating IP information in the background. {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1360.156995] env[63538]: DEBUG nova.network.neutron [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] allocate_for_instance() {{(pid=63538) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1360.204581] env[63538]: DEBUG nova.policy [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb444448a4d64c5e8ec9613ed633a527', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9b1eba931f144b94b6e186dac1310dfa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63538) authorize /opt/stack/nova/nova/policy.py:201}} [ 1360.242270] env[63538]: DEBUG oslo_vmware.api [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Task: {'id': task-5102085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123192} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.242542] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1360.242734] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Deleted contents of the VM from datastore datastore2 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1360.242908] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1360.243102] env[63538]: INFO nova.compute.manager [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] [instance: d6215939-5e06-425d-b947-224eebb8386b] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1360.243352] env[63538]: DEBUG oslo.service.loopingcall [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1360.243552] env[63538]: DEBUG nova.compute.manager [-] [instance: d6215939-5e06-425d-b947-224eebb8386b] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1360.243646] env[63538]: DEBUG nova.network.neutron [-] [instance: d6215939-5e06-425d-b947-224eebb8386b] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1360.475393] env[63538]: DEBUG nova.network.neutron [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Successfully created port: ae5d20df-f8bf-45b5-9aac-2cb2eb561e06 {{(pid=63538) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1360.660345] env[63538]: DEBUG nova.compute.manager [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Start building block device mappings for instance. 
{{(pid=63538) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1360.699116] env[63538]: DEBUG nova.compute.manager [req-ae2dbfec-9ab4-41b6-a379-94bd571c2e46 req-c75dcef0-3303-4f44-80c4-de8e68073941 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Received event network-vif-deleted-5f276ada-dc8e-4558-a169-bfcaf25172e8 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1360.699373] env[63538]: INFO nova.compute.manager [req-ae2dbfec-9ab4-41b6-a379-94bd571c2e46 req-c75dcef0-3303-4f44-80c4-de8e68073941 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Neutron deleted interface 5f276ada-dc8e-4558-a169-bfcaf25172e8; detaching it from the instance and deleting it from the info cache [ 1360.699564] env[63538]: DEBUG nova.network.neutron [req-ae2dbfec-9ab4-41b6-a379-94bd571c2e46 req-c75dcef0-3303-4f44-80c4-de8e68073941 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.173077] env[63538]: DEBUG nova.network.neutron [-] [instance: d6215939-5e06-425d-b947-224eebb8386b] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.203272] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f80956c3-7eb7-482e-aa62-39ef9bb9ec9d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.214840] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9056607-791a-4831-9072-f0bacff08284 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.242462] env[63538]: DEBUG nova.compute.manager [req-ae2dbfec-9ab4-41b6-a379-94bd571c2e46 req-c75dcef0-3303-4f44-80c4-de8e68073941 service nova] [instance: d6215939-5e06-425d-b947-224eebb8386b] Detach interface failed, port_id=5f276ada-dc8e-4558-a169-bfcaf25172e8, reason: Instance d6215939-5e06-425d-b947-224eebb8386b could not be found. {{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1361.670032] env[63538]: DEBUG nova.compute.manager [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Start spawning the instance on the hypervisor. {{(pid=63538) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1361.675085] env[63538]: INFO nova.compute.manager [-] [instance: d6215939-5e06-425d-b947-224eebb8386b] Took 1.43 seconds to deallocate network for instance. 
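The entries above finish the same teardown sequence for instance d6215939-5e06-425d-b947-224eebb8386b that was applied to 2f7bc37b-36c6-404a-82a9-c2b0d4a72439 earlier in this excerpt: a PowerOffVM_Task, an UnregisterVM call, a DeleteDatastoreFile_Task on the instance directory, and finally network deallocation; the two vCenter tasks are polled until they report completion. As a reading aid only, the sketch below imitates that polling pattern. It is a minimal, self-contained illustration in which FakeTask, its poll() method, and this wait_for_task() are hypothetical stand-ins invented for this note, not oslo.vmware's session or task API.

```python
# Hypothetical sketch of the task-polling pattern seen in the log entries
# above ("Task: {...} progress is N%." then "... completed successfully.").
# FakeTask merely simulates a vCenter task reference such as task-5102083;
# nothing here is the real oslo_vmware.api implementation.
import time


class FakeTask:
    """Simulated long-running task that advances 25% per poll."""

    def __init__(self, task_id, name):
        self.task_id = task_id
        self.name = name
        self._progress = 0

    def poll(self):
        self._progress = min(100, self._progress + 25)
        return self._progress


def wait_for_task(task, poll_interval=0.1):
    """Poll `task` until it reaches 100%, echoing log-style progress lines."""
    start = time.monotonic()
    while True:
        progress = task.poll()
        if progress >= 100:
            break
        print(f"Task: {{'id': '{task.task_id}', 'name': '{task.name}'}} "
              f"progress is {progress}%.")
        time.sleep(poll_interval)
    duration = time.monotonic() - start
    print(f"Task: {{'id': '{task.task_id}', 'name': '{task.name}', "
          f"'duration_secs': {duration:.6f}}} completed successfully.")


if __name__ == "__main__":
    # The teardown above runs tasks of this kind in order: power off the VM,
    # unregister it, then delete its files from the datastore.
    for task in (FakeTask("task-5102083", "PowerOffVM_Task"),
                 FakeTask("task-5102085", "DeleteDatastoreFile_Task")):
        wait_for_task(task)
```

The same wait loop accounts for most of this excerpt: MoveVirtualDisk_Task, CopyVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task for the unshelve of d6215939-5e06-425d-b947-224eebb8386b are polled in exactly this way, which is why the "progress is N%" lines dominate the log.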
[ 1361.696346] env[63538]: DEBUG nova.virt.hardware [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-12T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-12T12:48:20Z,direct_url=,disk_format='vmdk',id=faabbca4-e27b-433a-b93d-f059fd73bc92,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='589e84f0d68d4127baed4a6b24d18503',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-12T12:48:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1361.696631] env[63538]: DEBUG nova.virt.hardware [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Flavor limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1361.696793] env[63538]: DEBUG nova.virt.hardware [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Image limits 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1361.696979] env[63538]: DEBUG nova.virt.hardware [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Flavor pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1361.697150] env[63538]: DEBUG nova.virt.hardware [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Image pref 0:0:0 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1361.697306] env[63538]: DEBUG nova.virt.hardware [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63538) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1361.697520] env[63538]: DEBUG nova.virt.hardware [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1361.697683] env[63538]: DEBUG nova.virt.hardware [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1361.697855] env[63538]: DEBUG nova.virt.hardware [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c 
tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Got 1 possible topologies {{(pid=63538) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1361.698032] env[63538]: DEBUG nova.virt.hardware [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1361.698251] env[63538]: DEBUG nova.virt.hardware [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63538) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1361.699134] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e1b12a-871d-4e9e-bf86-3c3f68d14ee5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.709933] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47c76b3-523a-43e5-a2a4-e9ea1c34a9c9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.885070] env[63538]: DEBUG nova.compute.manager [req-6be8486a-4aed-4812-a45c-79499b4c3b7d req-f0a1e116-7c2a-4881-b458-5181a241756b service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Received event network-vif-plugged-ae5d20df-f8bf-45b5-9aac-2cb2eb561e06 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1361.885324] env[63538]: DEBUG oslo_concurrency.lockutils [req-6be8486a-4aed-4812-a45c-79499b4c3b7d req-f0a1e116-7c2a-4881-b458-5181a241756b service nova] Acquiring lock "46527669-5c77-4131-a6f5-2f943f9c7347-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.885551] env[63538]: DEBUG oslo_concurrency.lockutils [req-6be8486a-4aed-4812-a45c-79499b4c3b7d req-f0a1e116-7c2a-4881-b458-5181a241756b service nova] Lock "46527669-5c77-4131-a6f5-2f943f9c7347-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.885726] env[63538]: DEBUG oslo_concurrency.lockutils [req-6be8486a-4aed-4812-a45c-79499b4c3b7d req-f0a1e116-7c2a-4881-b458-5181a241756b service nova] Lock "46527669-5c77-4131-a6f5-2f943f9c7347-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.885904] env[63538]: DEBUG nova.compute.manager [req-6be8486a-4aed-4812-a45c-79499b4c3b7d req-f0a1e116-7c2a-4881-b458-5181a241756b service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] No waiting events found dispatching network-vif-plugged-ae5d20df-f8bf-45b5-9aac-2cb2eb561e06 {{(pid=63538) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1361.886159] env[63538]: WARNING nova.compute.manager [req-6be8486a-4aed-4812-a45c-79499b4c3b7d 
req-f0a1e116-7c2a-4881-b458-5181a241756b service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Received unexpected event network-vif-plugged-ae5d20df-f8bf-45b5-9aac-2cb2eb561e06 for instance with vm_state building and task_state spawning. [ 1361.992017] env[63538]: DEBUG nova.network.neutron [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Successfully updated port: ae5d20df-f8bf-45b5-9aac-2cb2eb561e06 {{(pid=63538) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1362.181811] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.182091] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.182331] env[63538]: DEBUG nova.objects.instance [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lazy-loading 'resources' on Instance uuid d6215939-5e06-425d-b947-224eebb8386b {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1362.494352] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "refresh_cache-46527669-5c77-4131-a6f5-2f943f9c7347" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1362.494352] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired lock "refresh_cache-46527669-5c77-4131-a6f5-2f943f9c7347" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.494504] env[63538]: DEBUG nova.network.neutron [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Building network info cache for instance {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1362.730879] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c2a0c5-233b-423d-8158-020e0160d691 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.738984] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4a0269-0051-4886-a064-ce60bd7eb060 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.769924] 
env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53bf9d8d-b09e-4577-a6d9-f6f94769669b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.778229] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83631ae5-b87d-453a-8d4a-21d531c3a7c9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.793642] env[63538]: DEBUG nova.compute.provider_tree [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1363.027261] env[63538]: DEBUG nova.network.neutron [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Instance cache missing network info. {{(pid=63538) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1363.199997] env[63538]: DEBUG nova.network.neutron [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Updating instance_info_cache with network_info: [{"id": "ae5d20df-f8bf-45b5-9aac-2cb2eb561e06", "address": "fa:16:3e:42:8c:ad", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae5d20df-f8", "ovs_interfaceid": "ae5d20df-f8bf-45b5-9aac-2cb2eb561e06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.296375] env[63538]: DEBUG nova.scheduler.client.report [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1363.703101] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Releasing lock "refresh_cache-46527669-5c77-4131-a6f5-2f943f9c7347" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1363.703445] env[63538]: DEBUG nova.compute.manager [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Instance network_info: |[{"id": "ae5d20df-f8bf-45b5-9aac-2cb2eb561e06", "address": "fa:16:3e:42:8c:ad", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae5d20df-f8", "ovs_interfaceid": "ae5d20df-f8bf-45b5-9aac-2cb2eb561e06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63538) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1363.703897] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:8c:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae5d20df-f8bf-45b5-9aac-2cb2eb561e06', 'vif_model': 'vmxnet3'}] {{(pid=63538) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1363.711784] env[63538]: DEBUG oslo.service.loopingcall [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1363.712050] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Creating VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1363.712294] env[63538]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99783254-fba7-498c-8ce8-16d0e244084b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.733403] env[63538]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1363.733403] env[63538]: value = "task-5102086" [ 1363.733403] env[63538]: _type = "Task" [ 1363.733403] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.741594] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5102086, 'name': CreateVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.801287] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.619s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.822062] env[63538]: INFO nova.scheduler.client.report [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Deleted allocations for instance d6215939-5e06-425d-b947-224eebb8386b [ 1363.913861] env[63538]: DEBUG nova.compute.manager [req-457c905b-245b-4b3c-b3d8-56b87149ba60 req-b11d9cc5-a5a9-4dd5-b0b4-83dc3a0a3bc1 service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Received event network-changed-ae5d20df-f8bf-45b5-9aac-2cb2eb561e06 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1363.913968] env[63538]: DEBUG nova.compute.manager [req-457c905b-245b-4b3c-b3d8-56b87149ba60 req-b11d9cc5-a5a9-4dd5-b0b4-83dc3a0a3bc1 service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Refreshing instance network info cache due to event network-changed-ae5d20df-f8bf-45b5-9aac-2cb2eb561e06. 
{{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1363.914324] env[63538]: DEBUG oslo_concurrency.lockutils [req-457c905b-245b-4b3c-b3d8-56b87149ba60 req-b11d9cc5-a5a9-4dd5-b0b4-83dc3a0a3bc1 service nova] Acquiring lock "refresh_cache-46527669-5c77-4131-a6f5-2f943f9c7347" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1363.914610] env[63538]: DEBUG oslo_concurrency.lockutils [req-457c905b-245b-4b3c-b3d8-56b87149ba60 req-b11d9cc5-a5a9-4dd5-b0b4-83dc3a0a3bc1 service nova] Acquired lock "refresh_cache-46527669-5c77-4131-a6f5-2f943f9c7347" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.914841] env[63538]: DEBUG nova.network.neutron [req-457c905b-245b-4b3c-b3d8-56b87149ba60 req-b11d9cc5-a5a9-4dd5-b0b4-83dc3a0a3bc1 service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Refreshing network info cache for port ae5d20df-f8bf-45b5-9aac-2cb2eb561e06 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1364.095164] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.095400] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.244531] env[63538]: DEBUG oslo_vmware.api [-] Task: {'id': task-5102086, 'name': CreateVM_Task, 'duration_secs': 0.308814} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.244716] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Created VM on the ESX host {{(pid=63538) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1364.245514] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.245670] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.246058] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1364.246373] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c3a6116-3a6b-4fe2-8191-442cab3836c9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.251481] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1364.251481] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ca38e6-a6e6-8991-9941-d47253d03b02" [ 1364.251481] env[63538]: _type = "Task" [ 1364.251481] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.260945] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ca38e6-a6e6-8991-9941-d47253d03b02, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.329616] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fae1024b-e475-44bb-967a-d386cbd7c8cb tempest-ServerActionsTestOtherB-1578235278 tempest-ServerActionsTestOtherB-1578235278-project-member] Lock "d6215939-5e06-425d-b947-224eebb8386b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.215s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.600620] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.600804] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Starting heal instance info cache {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 1364.632771] env[63538]: DEBUG nova.network.neutron [req-457c905b-245b-4b3c-b3d8-56b87149ba60 req-b11d9cc5-a5a9-4dd5-b0b4-83dc3a0a3bc1 service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Updated VIF entry in instance network info cache for port ae5d20df-f8bf-45b5-9aac-2cb2eb561e06. {{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1364.633159] env[63538]: DEBUG nova.network.neutron [req-457c905b-245b-4b3c-b3d8-56b87149ba60 req-b11d9cc5-a5a9-4dd5-b0b4-83dc3a0a3bc1 service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Updating instance_info_cache with network_info: [{"id": "ae5d20df-f8bf-45b5-9aac-2cb2eb561e06", "address": "fa:16:3e:42:8c:ad", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae5d20df-f8", "ovs_interfaceid": "ae5d20df-f8bf-45b5-9aac-2cb2eb561e06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.762744] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52ca38e6-a6e6-8991-9941-d47253d03b02, 'name': SearchDatastore_Task, 'duration_secs': 0.009889} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.763114] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1364.763332] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Processing image faabbca4-e27b-433a-b93d-f059fd73bc92 {{(pid=63538) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1364.763618] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.763788] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.763977] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1364.764262] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a202bb90-54c5-4db4-b542-17b9db46678a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.774578] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63538) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1364.774764] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63538) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1364.775805] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a818c1d-2702-4b94-81a2-7613f72cb9f2 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.781495] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1364.781495] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fe775c-d390-91f7-7704-5a6b49172df0" [ 1364.781495] env[63538]: _type = "Task" [ 1364.781495] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.789730] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fe775c-d390-91f7-7704-5a6b49172df0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.136855] env[63538]: DEBUG oslo_concurrency.lockutils [req-457c905b-245b-4b3c-b3d8-56b87149ba60 req-b11d9cc5-a5a9-4dd5-b0b4-83dc3a0a3bc1 service nova] Releasing lock "refresh_cache-46527669-5c77-4131-a6f5-2f943f9c7347" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.292323] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]52fe775c-d390-91f7-7704-5a6b49172df0, 'name': SearchDatastore_Task, 'duration_secs': 0.009215} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.293147] env[63538]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23ccd7d7-7c51-4040-a106-8485b3bf4d16 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.298681] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1365.298681] env[63538]: value = "session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523423d0-0467-53f7-8703-e351ac1da2a4" [ 1365.298681] env[63538]: _type = "Task" [ 1365.298681] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.307171] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523423d0-0467-53f7-8703-e351ac1da2a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.606851] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Didn't find any instances for network info cache update. {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10103}} [ 1365.607148] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1365.607350] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1365.607516] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1365.607672] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1365.607849] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1365.608034] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1365.608210] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63538) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10636}} [ 1365.608405] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1365.810828] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': session[52ff5d9d-4dc4-c26b-9f15-0aefa8328b97]523423d0-0467-53f7-8703-e351ac1da2a4, 'name': SearchDatastore_Task, 'duration_secs': 0.010027} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.811214] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.811360] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 46527669-5c77-4131-a6f5-2f943f9c7347/46527669-5c77-4131-a6f5-2f943f9c7347.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1365.811632] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-400857d7-8120-404a-9f94-530e88063f5c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.818542] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1365.818542] env[63538]: value = "task-5102088" [ 1365.818542] env[63538]: _type = "Task" [ 1365.818542] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.826426] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102088, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.112040] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.112431] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.112724] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.112898] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63538) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1366.113953] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1501c3-2524-4c66-b2b0-00e4a2beb27a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.123466] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377b3a27-9825-446c-80cc-37b643e86714 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.139410] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034c688a-dfb2-4f1c-9c0a-03b10f69d9b8 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.147239] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7dd50c7-ba9c-4dec-b40f-4ca4bcaa1796 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.179370] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180111MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=63538) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1366.179574] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.179777] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63538) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.328721] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102088, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472903} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.328947] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/faabbca4-e27b-433a-b93d-f059fd73bc92/faabbca4-e27b-433a-b93d-f059fd73bc92.vmdk to [datastore1] 46527669-5c77-4131-a6f5-2f943f9c7347/46527669-5c77-4131-a6f5-2f943f9c7347.vmdk {{(pid=63538) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1366.329190] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Extending root virtual disk to 1048576 {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1366.329490] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b3f5d333-b74b-4c18-806b-005331d2194e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.336640] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1366.336640] env[63538]: value = "task-5102089" [ 1366.336640] env[63538]: _type = "Task" [ 1366.336640] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.347131] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102089, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.848121] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102089, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065594} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.848666] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Extended root virtual disk {{(pid=63538) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1366.849403] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8184a0-df37-4173-8c63-9425d21dfcc9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.874428] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] 46527669-5c77-4131-a6f5-2f943f9c7347/46527669-5c77-4131-a6f5-2f943f9c7347.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1366.874824] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac0afa8c-aa63-4023-903d-950a8b23c4cf {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.895187] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1366.895187] env[63538]: value = "task-5102090" [ 1366.895187] env[63538]: _type = "Task" [ 1366.895187] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.904494] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102090, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.205072] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Instance 46527669-5c77-4131-a6f5-2f943f9c7347 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63538) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1367.205361] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1367.205574] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=100GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '1', 'num_vm_building': '1', 'num_task_spawning': '1', 'num_os_type_None': '1', 'num_proj_9b1eba931f144b94b6e186dac1310dfa': '1', 'io_workload': '1'} {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1367.232470] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df3a390-8c29-44c9-8c9f-bb677edd4a0b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.241160] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f92375-a236-4e54-9d8d-3bbd593800f4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.273779] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d690628-c551-4438-8db0-f5016073527c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.281985] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc43092d-2416-4215-9d0c-5f8342068f5d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.296092] env[63538]: DEBUG nova.compute.provider_tree [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1367.405551] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102090, 'name': ReconfigVM_Task, 'duration_secs': 0.305251} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.405905] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Reconfigured VM instance instance-0000007e to attach disk [datastore1] 46527669-5c77-4131-a6f5-2f943f9c7347/46527669-5c77-4131-a6f5-2f943f9c7347.vmdk or device None with type sparse {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1367.406579] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a5d7c5b-6805-4024-aa9a-0fb53dc636fc {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.413585] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1367.413585] env[63538]: value = "task-5102091" [ 1367.413585] env[63538]: _type = "Task" [ 1367.413585] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.421898] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102091, 'name': Rename_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.798790] env[63538]: DEBUG nova.scheduler.client.report [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1367.924415] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102091, 'name': Rename_Task, 'duration_secs': 0.142236} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.924810] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Powering on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1367.924961] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9583765c-491f-4b22-bd40-77f0456540fa {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.932085] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1367.932085] env[63538]: value = "task-5102092" [ 1367.932085] env[63538]: _type = "Task" [ 1367.932085] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.940477] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102092, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.304098] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63538) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1368.304242] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.124s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.442990] env[63538]: DEBUG oslo_vmware.api [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102092, 'name': PowerOnVM_Task, 'duration_secs': 0.43717} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.443422] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Powered on the VM {{(pid=63538) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1368.443634] env[63538]: INFO nova.compute.manager [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Took 6.77 seconds to spawn the instance on the hypervisor. 
[ 1368.443826] env[63538]: DEBUG nova.compute.manager [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Checking state {{(pid=63538) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1368.444633] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201331c3-0897-49a4-a6c4-829c375b2841 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.962496] env[63538]: INFO nova.compute.manager [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Took 11.76 seconds to build instance. [ 1369.466085] env[63538]: DEBUG oslo_concurrency.lockutils [None req-fb80f8b3-2738-49b9-8bee-099984fc2f5c tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "46527669-5c77-4131-a6f5-2f943f9c7347" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.273s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.561879] env[63538]: DEBUG nova.compute.manager [req-09ac0e55-6b5b-4966-9f01-faf43168f33a req-cf620f6b-0496-4529-82f7-6dfa93a9a75f service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Received event network-changed-ae5d20df-f8bf-45b5-9aac-2cb2eb561e06 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1369.562114] env[63538]: DEBUG nova.compute.manager [req-09ac0e55-6b5b-4966-9f01-faf43168f33a req-cf620f6b-0496-4529-82f7-6dfa93a9a75f service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Refreshing instance network info cache due to event network-changed-ae5d20df-f8bf-45b5-9aac-2cb2eb561e06. {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11225}} [ 1369.562343] env[63538]: DEBUG oslo_concurrency.lockutils [req-09ac0e55-6b5b-4966-9f01-faf43168f33a req-cf620f6b-0496-4529-82f7-6dfa93a9a75f service nova] Acquiring lock "refresh_cache-46527669-5c77-4131-a6f5-2f943f9c7347" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.562635] env[63538]: DEBUG oslo_concurrency.lockutils [req-09ac0e55-6b5b-4966-9f01-faf43168f33a req-cf620f6b-0496-4529-82f7-6dfa93a9a75f service nova] Acquired lock "refresh_cache-46527669-5c77-4131-a6f5-2f943f9c7347" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.562767] env[63538]: DEBUG nova.network.neutron [req-09ac0e55-6b5b-4966-9f01-faf43168f33a req-cf620f6b-0496-4529-82f7-6dfa93a9a75f service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Refreshing network info cache for port ae5d20df-f8bf-45b5-9aac-2cb2eb561e06 {{(pid=63538) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1370.321689] env[63538]: DEBUG nova.network.neutron [req-09ac0e55-6b5b-4966-9f01-faf43168f33a req-cf620f6b-0496-4529-82f7-6dfa93a9a75f service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Updated VIF entry in instance network info cache for port ae5d20df-f8bf-45b5-9aac-2cb2eb561e06. 
{{(pid=63538) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1370.322097] env[63538]: DEBUG nova.network.neutron [req-09ac0e55-6b5b-4966-9f01-faf43168f33a req-cf620f6b-0496-4529-82f7-6dfa93a9a75f service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Updating instance_info_cache with network_info: [{"id": "ae5d20df-f8bf-45b5-9aac-2cb2eb561e06", "address": "fa:16:3e:42:8c:ad", "network": {"id": "64d377b2-188d-4b62-8b84-64d9c351ca10", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1354632003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b1eba931f144b94b6e186dac1310dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae5d20df-f8", "ovs_interfaceid": "ae5d20df-f8bf-45b5-9aac-2cb2eb561e06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.825080] env[63538]: DEBUG oslo_concurrency.lockutils [req-09ac0e55-6b5b-4966-9f01-faf43168f33a req-cf620f6b-0496-4529-82f7-6dfa93a9a75f service nova] Releasing lock "refresh_cache-46527669-5c77-4131-a6f5-2f943f9c7347" {{(pid=63538) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1408.465223] env[63538]: DEBUG oslo_concurrency.lockutils [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "46527669-5c77-4131-a6f5-2f943f9c7347" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1408.465530] env[63538]: DEBUG oslo_concurrency.lockutils [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "46527669-5c77-4131-a6f5-2f943f9c7347" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1408.969173] env[63538]: DEBUG nova.compute.utils [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Using /dev/sd instead of None {{(pid=63538) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1409.471942] env[63538]: DEBUG oslo_concurrency.lockutils [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "46527669-5c77-4131-a6f5-2f943f9c7347" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.512067] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1410.512440] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Cleaning up deleted instances with incomplete migration {{(pid=63538) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11355}} [ 1410.532429] env[63538]: DEBUG oslo_concurrency.lockutils [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "46527669-5c77-4131-a6f5-2f943f9c7347" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.532848] env[63538]: DEBUG oslo_concurrency.lockutils [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "46527669-5c77-4131-a6f5-2f943f9c7347" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.533245] env[63538]: INFO nova.compute.manager [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Attaching volume 193a422b-0329-47f4-899e-64999de5ef67 to /dev/sdb [ 1410.567321] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5665f1-d7fd-4745-b02c-8699acfac98e {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.577335] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a1b4d7-4dd0-4061-b59b-ab27b3e38f92 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.594285] env[63538]: DEBUG nova.virt.block_device [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Updating existing volume attachment record: 603d97be-7d53-4d30-85a9-ae873196bcb3 {{(pid=63538) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1413.014160] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.014554] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1415.140513] 
env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Volume attach. Driver type: vmdk {{(pid=63538) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1415.140759] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992573', 'volume_id': '193a422b-0329-47f4-899e-64999de5ef67', 'name': 'volume-193a422b-0329-47f4-899e-64999de5ef67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '46527669-5c77-4131-a6f5-2f943f9c7347', 'attached_at': '', 'detached_at': '', 'volume_id': '193a422b-0329-47f4-899e-64999de5ef67', 'serial': '193a422b-0329-47f4-899e-64999de5ef67'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1415.141684] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4fa542d-5fa7-4533-bf3f-a815666e528a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.160008] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14172853-4149-4bb2-b536-4a53090b289d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.184752] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] volume-193a422b-0329-47f4-899e-64999de5ef67/volume-193a422b-0329-47f4-899e-64999de5ef67.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1415.185040] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb8a222a-9b41-4a72-8633-b8b2a45bf876 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.203327] env[63538]: DEBUG oslo_vmware.api [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1415.203327] env[63538]: value = "task-5102095" [ 1415.203327] env[63538]: _type = "Task" [ 1415.203327] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.211896] env[63538]: DEBUG oslo_vmware.api [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102095, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.512234] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1415.512400] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Cleaning up deleted instances {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11317}} [ 1415.714781] env[63538]: DEBUG oslo_vmware.api [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102095, 'name': ReconfigVM_Task, 'duration_secs': 0.335495} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.715065] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Reconfigured VM instance instance-0000007e to attach disk [datastore1] volume-193a422b-0329-47f4-899e-64999de5ef67/volume-193a422b-0329-47f4-899e-64999de5ef67.vmdk or device None with type thin {{(pid=63538) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1415.719853] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78170de7-34e7-494c-a16c-084dbdb72e94 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.735321] env[63538]: DEBUG oslo_vmware.api [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1415.735321] env[63538]: value = "task-5102096" [ 1415.735321] env[63538]: _type = "Task" [ 1415.735321] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.744383] env[63538]: DEBUG oslo_vmware.api [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102096, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.021306] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] There are 28 instances to clean {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11326}} [ 1416.021535] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 40d2c269-449b-4b1e-9422-abcfb6543c11] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1416.246236] env[63538]: DEBUG oslo_vmware.api [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102096, 'name': ReconfigVM_Task, 'duration_secs': 0.134367} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.246522] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992573', 'volume_id': '193a422b-0329-47f4-899e-64999de5ef67', 'name': 'volume-193a422b-0329-47f4-899e-64999de5ef67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '46527669-5c77-4131-a6f5-2f943f9c7347', 'attached_at': '', 'detached_at': '', 'volume_id': '193a422b-0329-47f4-899e-64999de5ef67', 'serial': '193a422b-0329-47f4-899e-64999de5ef67'} {{(pid=63538) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1416.525166] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 2f7bc37b-36c6-404a-82a9-c2b0d4a72439] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1417.028826] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 579c71bd-24f0-4257-856c-b24ddb2b9dba] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1417.287324] env[63538]: DEBUG nova.objects.instance [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lazy-loading 'flavor' on Instance uuid 46527669-5c77-4131-a6f5-2f943f9c7347 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1417.533254] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: d6215939-5e06-425d-b947-224eebb8386b] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1417.794347] env[63538]: DEBUG oslo_concurrency.lockutils [None req-29c1a473-95f2-4052-ac43-9d58fb952217 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "46527669-5c77-4131-a6f5-2f943f9c7347" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.261s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.967638] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "46527669-5c77-4131-a6f5-2f943f9c7347" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.967911] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "46527669-5c77-4131-a6f5-2f943f9c7347" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.036241] 
env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: afa669ca-26b3-4b9d-ac9d-abbc966d5798] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1418.471289] env[63538]: INFO nova.compute.manager [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Detaching volume 193a422b-0329-47f4-899e-64999de5ef67 [ 1418.502740] env[63538]: INFO nova.virt.block_device [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Attempting to driver detach volume 193a422b-0329-47f4-899e-64999de5ef67 from mountpoint /dev/sdb [ 1418.502971] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Volume detach. Driver type: vmdk {{(pid=63538) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1418.503188] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992573', 'volume_id': '193a422b-0329-47f4-899e-64999de5ef67', 'name': 'volume-193a422b-0329-47f4-899e-64999de5ef67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '46527669-5c77-4131-a6f5-2f943f9c7347', 'attached_at': '', 'detached_at': '', 'volume_id': '193a422b-0329-47f4-899e-64999de5ef67', 'serial': '193a422b-0329-47f4-899e-64999de5ef67'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1418.504143] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609f7e4b-8203-4c76-bbd1-a766c32d56ce {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.525860] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3562b0-2257-4ee4-ad4c-2432464d224f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.533709] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b76e33-118d-4ac7-b3d8-45406f63878c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.553809] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: c1766d8e-7949-4fa8-a762-007d016a4de1] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1418.556260] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621bef9b-f458-43d2-8566-9c21ffb4222c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1418.571502] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] The volume has not been displaced from its original location: [datastore1] volume-193a422b-0329-47f4-899e-64999de5ef67/volume-193a422b-0329-47f4-899e-64999de5ef67.vmdk. No consolidation needed. {{(pid=63538) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1418.576768] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Reconfiguring VM instance instance-0000007e to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1418.577674] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-001630a9-b058-4ebc-8223-e33edd16c364 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.596484] env[63538]: DEBUG oslo_vmware.api [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1418.596484] env[63538]: value = "task-5102097" [ 1418.596484] env[63538]: _type = "Task" [ 1418.596484] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.606792] env[63538]: DEBUG oslo_vmware.api [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102097, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.060064] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e93aab2e-f8c4-4959-923f-0449a84108d6] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1419.106599] env[63538]: DEBUG oslo_vmware.api [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102097, 'name': ReconfigVM_Task, 'duration_secs': 0.235168} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.106935] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Reconfigured VM instance instance-0000007e to detach disk 2001 {{(pid=63538) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1419.111598] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24f9b305-3a3d-4d62-8486-c18a5ced4889 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.126825] env[63538]: DEBUG oslo_vmware.api [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1419.126825] env[63538]: value = "task-5102098" [ 1419.126825] env[63538]: _type = "Task" [ 1419.126825] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.134777] env[63538]: DEBUG oslo_vmware.api [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102098, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.563026] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 057f192d-b470-4683-b197-913457d10717] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1419.637234] env[63538]: DEBUG oslo_vmware.api [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102098, 'name': ReconfigVM_Task, 'duration_secs': 0.137087} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.637541] env[63538]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-992573', 'volume_id': '193a422b-0329-47f4-899e-64999de5ef67', 'name': 'volume-193a422b-0329-47f4-899e-64999de5ef67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '46527669-5c77-4131-a6f5-2f943f9c7347', 'attached_at': '', 'detached_at': '', 'volume_id': '193a422b-0329-47f4-899e-64999de5ef67', 'serial': '193a422b-0329-47f4-899e-64999de5ef67'} {{(pid=63538) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1420.066295] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: df85b1e1-0319-4619-8680-73bb5d413595] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1420.183254] env[63538]: DEBUG nova.objects.instance [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lazy-loading 'flavor' on Instance uuid 46527669-5c77-4131-a6f5-2f943f9c7347 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1420.570259] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 58ac9c5f-bcbe-41e9-a7ce-45d164b8ea2a] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1421.076027] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: d91a140b-6ca9-4c0e-b433-795d2014975c] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1421.190828] env[63538]: DEBUG oslo_concurrency.lockutils [None req-b80c57b0-f702-4103-9490-bd05c22f2e88 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "46527669-5c77-4131-a6f5-2f943f9c7347" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.223s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.579234] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 6a8de8d2-aa15-4057-a936-57cad9c8b1d0] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1422.082687] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: f0183c1f-4557-45fd-ba65-4821ef661173] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1422.213614] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "46527669-5c77-4131-a6f5-2f943f9c7347" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
{{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.213891] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "46527669-5c77-4131-a6f5-2f943f9c7347" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.215462] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "46527669-5c77-4131-a6f5-2f943f9c7347-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.215691] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "46527669-5c77-4131-a6f5-2f943f9c7347-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.002s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.215876] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "46527669-5c77-4131-a6f5-2f943f9c7347-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.218215] env[63538]: INFO nova.compute.manager [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Terminating instance [ 1422.220317] env[63538]: DEBUG nova.compute.manager [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Start destroying the instance on the hypervisor. 
{{(pid=63538) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1422.220514] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Destroying instance {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1422.221351] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5aa21f3-2ce2-4c31-84ec-c451db99fd0b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.229291] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Powering off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1422.229515] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93d105e2-c907-4339-b6f4-57d21682c59a {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.235954] env[63538]: DEBUG oslo_vmware.api [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1422.235954] env[63538]: value = "task-5102099" [ 1422.235954] env[63538]: _type = "Task" [ 1422.235954] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.244110] env[63538]: DEBUG oslo_vmware.api [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102099, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.585886] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e3feec17-ca1b-4873-bb0a-370c3868aabf] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1422.746570] env[63538]: DEBUG oslo_vmware.api [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102099, 'name': PowerOffVM_Task, 'duration_secs': 0.216432} completed successfully. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.746859] env[63538]: DEBUG nova.virt.vmwareapi.vm_util [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Powered off the VM {{(pid=63538) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1422.747025] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Unregistering the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1422.747281] env[63538]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88bae71d-26bc-48fc-9a4e-7e608d708d1d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.811999] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Unregistered the VM {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1422.812374] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Deleting contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1422.812671] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Deleting the datastore file [datastore1] 46527669-5c77-4131-a6f5-2f943f9c7347 {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1422.813025] env[63538]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ff35e96-4756-4ba5-9a4f-a3efe3edff14 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.820360] env[63538]: DEBUG oslo_vmware.api [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for the task: (returnval){ [ 1422.820360] env[63538]: value = "task-5102101" [ 1422.820360] env[63538]: _type = "Task" [ 1422.820360] env[63538]: } to complete. {{(pid=63538) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.834147] env[63538]: DEBUG oslo_vmware.api [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102101, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.090265] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 0599fa68-1109-4edf-b42e-f81e7f09d641] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1423.330358] env[63538]: DEBUG oslo_vmware.api [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Task: {'id': task-5102101, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141656} completed successfully. {{(pid=63538) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.330562] env[63538]: DEBUG nova.virt.vmwareapi.ds_util [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Deleted the datastore file {{(pid=63538) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1423.330747] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Deleted contents of the VM from datastore datastore1 {{(pid=63538) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1423.330906] env[63538]: DEBUG nova.virt.vmwareapi.vmops [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Instance destroyed {{(pid=63538) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1423.331106] env[63538]: INFO nova.compute.manager [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1423.331361] env[63538]: DEBUG oslo.service.loopingcall [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63538) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1423.331557] env[63538]: DEBUG nova.compute.manager [-] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Deallocating network for instance {{(pid=63538) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1423.331646] env[63538]: DEBUG nova.network.neutron [-] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] deallocate_for_instance() {{(pid=63538) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1423.594081] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 4e07fbfb-cae0-440d-8f75-c76cce3f7d00] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1423.763278] env[63538]: DEBUG nova.compute.manager [req-9b6e45e3-15f6-40b7-aecb-59eac8bb9437 req-82325fd3-7223-45c0-b8bb-2bc99067b17d service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Received event network-vif-deleted-ae5d20df-f8bf-45b5-9aac-2cb2eb561e06 {{(pid=63538) external_instance_event /opt/stack/nova/nova/compute/manager.py:11220}} [ 1423.763516] env[63538]: INFO nova.compute.manager [req-9b6e45e3-15f6-40b7-aecb-59eac8bb9437 req-82325fd3-7223-45c0-b8bb-2bc99067b17d service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Neutron deleted interface ae5d20df-f8bf-45b5-9aac-2cb2eb561e06; detaching it from the instance and deleting it from the info cache [ 1423.763676] env[63538]: DEBUG nova.network.neutron [req-9b6e45e3-15f6-40b7-aecb-59eac8bb9437 req-82325fd3-7223-45c0-b8bb-2bc99067b17d service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.096869] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 12beddad-1f19-4cee-b885-3079e3603ba3] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1424.239988] env[63538]: DEBUG nova.network.neutron [-] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Updating instance_info_cache with network_info: [] {{(pid=63538) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.266232] env[63538]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a50cb32-6b24-41ea-ad16-7c0b5725ab4f {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.277755] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10faa879-dd73-4fb9-80b4-70c205dc2573 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.307085] env[63538]: DEBUG nova.compute.manager [req-9b6e45e3-15f6-40b7-aecb-59eac8bb9437 req-82325fd3-7223-45c0-b8bb-2bc99067b17d service nova] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Detach interface failed, port_id=ae5d20df-f8bf-45b5-9aac-2cb2eb561e06, reason: Instance 46527669-5c77-4131-a6f5-2f943f9c7347 could not be found. 
{{(pid=63538) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11054}} [ 1424.599947] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: aaf52cad-86fd-42df-8ee3-13724e3f5e94] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1424.742993] env[63538]: INFO nova.compute.manager [-] [instance: 46527669-5c77-4131-a6f5-2f943f9c7347] Took 1.41 seconds to deallocate network for instance. [ 1425.102982] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: e0d5a3b2-21e1-4de0-ac10-1a5687a60c10] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1425.249563] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.249870] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.250140] env[63538]: DEBUG nova.objects.instance [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lazy-loading 'resources' on Instance uuid 46527669-5c77-4131-a6f5-2f943f9c7347 {{(pid=63538) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1425.606203] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: d00151c1-ca34-4c57-9ed2-74d506a0cffb] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1425.904205] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9205f09-ac2f-4a59-b608-4069bb2a5e0d {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.912386] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787df766-b94e-4b5f-8915-fc641e012436 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.942506] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb20c802-c330-4253-b7c5-ab3063c0de4b {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.950270] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28898ce-b415-45e9-a6f0-8d2362240750 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.965147] env[63538]: DEBUG nova.compute.provider_tree [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 
tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1426.109580] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 4387a3ec-0f0b-4917-97f3-08c737bee4e7] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1426.469477] env[63538]: DEBUG nova.scheduler.client.report [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1426.613448] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 17350ce4-555b-4f00-9a75-de32a4453141] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1426.976482] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.726s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.994976] env[63538]: INFO nova.scheduler.client.report [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Deleted allocations for instance 46527669-5c77-4131-a6f5-2f943f9c7347 [ 1427.116871] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 048573b4-26db-4a62-81e0-1bc1c3999d02] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1427.505576] env[63538]: DEBUG oslo_concurrency.lockutils [None req-f81d3426-7f82-49fe-b6ae-b8cd9824bb30 tempest-AttachVolumeNegativeTest-1475514698 tempest-AttachVolumeNegativeTest-1475514698-project-member] Lock "46527669-5c77-4131-a6f5-2f943f9c7347" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.292s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.620313] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 42af31f3-a9d0-4fdd-99fa-442ebe915277] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1428.123944] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 049518bd-d569-491a-8f79-6f0b78cf44b2] Instance has had 0 of 5 cleanup 
attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1428.627089] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 6257bf5c-8a1c-4204-9605-cc07491e14ea] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1429.130889] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: 3d80dc17-e330-4575-8e12-e06d8e76274a] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1429.634238] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] [instance: fb26fb32-a420-4667-850c-e32786edd8f2] Instance has had 0 of 5 cleanup attempts {{(pid=63538) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11330}} [ 1430.137816] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1432.640913] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1432.641334] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1432.641451] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Starting heal instance info cache {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10017}} [ 1432.641611] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Rebuilding the list of instances to heal {{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10021}} [ 1433.144826] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Didn't find any instances for network info cache update. 
{{(pid=63538) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10103}} [ 1433.145091] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.145266] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.145429] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.145575] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.145704] env[63538]: DEBUG nova.compute.manager [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63538) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10636}} [ 1433.145846] env[63538]: DEBUG oslo_service.periodic_task [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63538) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.649306] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.649657] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.649720] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.649927] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63538) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1433.650876] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e9590d-466f-43f8-87b3-3fca8879c2a4 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.660157] env[63538]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53666409-5e65-444b-bb7a-2666fc253936 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.675698] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5329fc-e66f-495e-8855-f6008c76d49c {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.683226] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14e0f33-fbda-45a6-a09f-2a963abcec06 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.713111] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180355MB free_disk=95GB free_vcpus=48 pci_devices=None {{(pid=63538) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1433.713328] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.713496] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.733717] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1434.733977] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=100GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] stats={'failed_builds': '0'} {{(pid=63538) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1434.747256] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3453fe6d-1c3d-4ed9-beb2-bd42187c76f9 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.755207] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17eda9b-9ae4-4230-a82f-943851451ca5 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.785228] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b24113-ec31-479c-8493-4a5f5a5f6896 {{(pid=63538) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.792397] env[63538]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cddd724c-868c-4084-bcc5-2f46b52e11b1 {{(pid=63538) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.806862] env[63538]: DEBUG nova.compute.provider_tree [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed in ProviderTree for provider: f65218a4-1d3d-476a-9093-01cae92c8635 {{(pid=63538) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1435.311079] env[63538]: DEBUG nova.scheduler.client.report [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Inventory has not changed for provider f65218a4-1d3d-476a-9093-01cae92c8635 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 95, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63538) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1435.815278] env[63538]: DEBUG nova.compute.resource_tracker [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63538) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1435.815646] env[63538]: DEBUG oslo_concurrency.lockutils [None req-6a3194d3-8bee-4137-aa37-01680329eae7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.102s {{(pid=63538) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
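The records above trace the full attach / detach / terminate cycle for instance 46527669-5c77-4131-a6f5-2f943f9c7347. For reference only, the per-instance lock plus ReconfigVM_Task polling pattern that produces the "Acquiring lock ... do_attach_volume", "Invoking VirtualMachine.ReconfigVM_Task" and "progress is N%" records can be sketched as below. This is a minimal illustration, not Nova's actual implementation: it assumes an already-authenticated oslo.vmware VMwareAPISession is passed in, and vm_ref / config_spec are hypothetical placeholders for the VM managed-object reference and the VirtualMachineConfigSpec that nova.virt.vmwareapi.volumeops would build.

    from oslo_concurrency import lockutils

    def attach_disk(session, instance_uuid, vm_ref, config_spec):
        # Serialize volume operations per instance, mirroring the
        # 'Lock "46527669-..." acquired by ... do_attach_volume' records.
        @lockutils.synchronized(instance_uuid)
        def _do_attach():
            # ReconfigVM_Task returns a vCenter Task reference
            # (e.g. task-5102095 above) rather than blocking.
            task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                      vm_ref, spec=config_spec)
            # oslo.vmware polls the task and emits the
            # "Task: {'id': ..., 'name': ReconfigVM_Task} progress is N%"
            # records until it completes or raises on failure.
            session.wait_for_task(task)

        _do_attach()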